text | repo_name | path | language | license | size | score
stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34
---|---|---|---|---|---|---
# coding=utf-8
import pygame
import pygame.locals
class Board(object):
"""
Plansza do gry. Odpowiada za rysowanie okna gry.
"""
def __init__(self, width, height):
"""
Konstruktor planszy do gry. Przygotowuje okienko gry.
:param width: szerokość w pikselach
:param height: wysokość w pikselach
"""
self.surface = pygame.display.set_mode((width, height), 0, 32)
pygame.display.set_caption('Game of life')
def draw(self, *args):
"""
Rysuje okno gry
:param args: lista obiektów do narysowania
"""
background = (0, 0, 0)
self.surface.fill(background)
for drawable in args:
drawable.draw_on(self.surface)
        # only at this point does the actual drawing in the game window
        # happen; before this we were merely deciding what to draw and how
pygame.display.update()
class GameOfLife(object):
"""
Łączy wszystkie elementy gry w całość.
"""
def __init__(self, width, height, cell_size=10):
"""
Przygotowanie ustawień gry
:param width: szerokość planszy mierzona liczbą komórek
:param height: wysokość planszy mierzona liczbą komórek
:param cell_size: bok komórki w pikselach
"""
pygame.init()
self.board = Board(width * cell_size, height * cell_size)
        # clock that we will use to control how fast consecutive
        # frames of the game are drawn
self.fps_clock = pygame.time.Clock()
def run(self):
"""
Główna pętla gry
"""
while not self.handle_events():
            # keep looping until we receive the signal to exit
self.board.draw()
self.fps_clock.tick(15)
def handle_events(self):
"""
Obsługa zdarzeń systemowych, tutaj zinterpretujemy np. ruchy myszką
:return True jeżeli pygame przekazał zdarzenie wyjścia z gry
"""
for event in pygame.event.get():
if event.type == pygame.locals.QUIT:
pygame.quit()
return True
# magic numbers used to determine whether a cell is alive
DEAD = 0
ALIVE = 1
class Population(object):
"""
Populacja komórek
"""
def __init__(self, width, height, cell_size=10):
"""
Przygotowuje ustawienia populacji
:param width: szerokość planszy mierzona liczbą komórek
:param height: wysokość planszy mierzona liczbą komórek
:param cell_size: bok komórki w pikselach
"""
self.box_size = cell_size
self.height = height
self.width = width
self.generation = self.reset_generation()
def reset_generation(self):
"""
Tworzy i zwraca macierz pustej populacji
"""
# w pętli wypełnij listę kolumnami
# które także w pętli zostają wypełnione wartością 0 (DEAD)
        return [[DEAD for y in range(self.height)] for x in range(self.width)]
def handle_mouse(self):
        # get the state of the mouse buttons using a pygame function
buttons = pygame.mouse.get_pressed()
        if not any(buttons):
            # ignore the event if none of the mouse buttons is pressed
            return
        # add a live cell if the first mouse button is pressed; this way
        # we can not only add live cells but also remove them
        alive = bool(buttons[0])
        # get the cursor position on the board, measured in pixels
        x, y = pygame.mouse.get_pos()
        # convert the position from pixels to cell coordinates in the matrix;
        # the player can click anywhere inside a box_size-wide square to
        # select a cell (floor division keeps the indices integers)
        x //= self.box_size
        y //= self.box_size
        # set the state of the cell in the matrix
        self.generation[x][y] = ALIVE if alive else DEAD
def draw_on(self, surface):
"""
Rysuje komórki na planszy
"""
for x, y in self.alive_cells():
size = (self.box_size, self.box_size)
position = (x * self.box_size, y * self.box_size)
color = (255, 255, 255)
thickness = 1
pygame.draw.rect(surface, color, pygame.locals.Rect(position, size), thickness)
def alive_cells(self):
"""
Generator zwracający współrzędne żywych komórek.
"""
for x in range(len(self.generation)):
column = self.generation[x]
for y in range(len(column)):
if column[y] == ALIVE:
                    # if the cell is alive, yield its coordinates
yield x, y
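    # NOTE: code1a.py is an early step of the tutorial and stops before the
    # actual Game of Life rules. A minimal sketch of the missing evolution
    # step, written as an extra Population method, might look like the one
    # below; `cycle_generation` is a hypothetical name, not part of the
    # original file.
    def cycle_generation(self):
        """Compute the next generation according to Conway's rules."""
        next_gen = self.reset_generation()
        for x in range(self.width):
            for y in range(self.height):
                # count the live neighbours of (x, y), wrapping at the edges
                count = sum(
                    self.generation[(x + dx) % self.width][(y + dy) % self.height]
                    for dx in (-1, 0, 1) for dy in (-1, 0, 1)
                    if (dx, dy) != (0, 0))
                if self.generation[x][y] == ALIVE:
                    # a live cell survives with 2 or 3 live neighbours
                    next_gen[x][y] = ALIVE if count in (2, 3) else DEAD
                else:
                    # a dead cell comes alive with exactly 3 live neighbours
                    next_gen[x][y] = ALIVE if count == 3 else DEAD
        self.generation = next_gen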
# This part should always be at the end of the module (this file is a module).
# We want to start the game only after all the classes have been declared.
if __name__ == "__main__":
game = GameOfLife(80, 40)
game.run()
| roninek/python101 | docs/pygame/life/code1a.py | Python | mit | 5,039 | 0.001628 |
"""index_artifacts
Revision ID: 340d5cc7e806
Revises: af3f4bdc27d1
Create Date: 2019-08-09 12:37:50.706914
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "340d5cc7e806"
down_revision = "af3f4bdc27d1"
branch_labels = ()
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_index(
"idx_artifact_job", "artifact", ["repository_id", "job_id"], unique=False
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index("idx_artifact_job", table_name="artifact")
# ### end Alembic commands ###
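# A minimal sketch of applying this migration from the command line,
# assuming a configured alembic.ini (standard Alembic usage, not part of
# this file):
#
#   alembic upgrade head      # applies upgrade(), creating idx_artifact_job
#   alembic downgrade -1      # reverts one revision, calling downgrade()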
| getsentry/zeus | zeus/migrations/340d5cc7e806_index_artifacts.py | Python | apache-2.0 | 705 | 0.001418 |
import os
import numpy as np
def save_weights(layers, weights_dir, epoch):
    # every parameter attribute a layer may carry; each parameter object
    # knows how to persist itself via save_weight()
    for idx in range(len(layers)):
        for name in ('W', 'W0', 'W1', 'b', 'b0', 'b1'):
            if hasattr(layers[idx], name):
                getattr(layers[idx], name).save_weight(
                    weights_dir, name + '_' + str(idx) + '_' + str(epoch))
def load_weights(layers, weights_dir, epoch):
    for idx in range(len(layers)):
        for name in ('W', 'W0', 'W1', 'b', 'b0', 'b1'):
            if hasattr(layers[idx], name):
                getattr(layers[idx], name).load_weight(
                    weights_dir, name + '_' + str(idx) + '_' + str(epoch))
def save_momentums(vels, weights_dir, epoch):
for ind in range(len(vels)):
np.save(os.path.join(weights_dir, 'mom_' + str(ind) + '_' + str(epoch)),
vels[ind].get_value())
def load_momentums(vels, weights_dir, epoch):
for ind in range(len(vels)):
vels[ind].set_value(np.load(os.path.join(
weights_dir, 'mom_' + str(ind) + '_' + str(epoch) + '.npy')))
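# A minimal usage sketch (hypothetical; assumes layers whose parameters
# expose save_weight/load_weight and shared-variable momentums, as implied
# by the calls above -- `build_model` and `vels` are placeholders):
#
#   layers, vels = build_model()
#   save_weights(layers, './weights', epoch=10)
#   save_momentums(vels, './weights', epoch=10)
#   load_weights(layers, './weights', epoch=10)
#   load_momentums(vels, './weights', epoch=10)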
| myt00seven/svrg | para_gpu/tools.py | Python | mit | 2,391 | 0.000836 |
# -*- coding: utf-8 -*-
# Copyright (C) 2009 Canonical
#
# Authors:
# Michael Vogt
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import GObject, Gtk, Gdk
import datetime
import gettext
import logging
import os
import json
import sys
import tempfile
import time
import threading
# py3
try:
from urllib.request import urlopen
urlopen # pyflakes
from queue import Queue
Queue # pyflakes
except ImportError:
# py2 fallbacks
from urllib import urlopen
from Queue import Queue
from gettext import gettext as _
from softwarecenter.backend.ubuntusso import get_ubuntu_sso_backend
import piston_mini_client
from softwarecenter.paths import SOFTWARE_CENTER_CONFIG_DIR
from softwarecenter.enums import Icons, SOFTWARE_CENTER_NAME_KEYRING
from softwarecenter.config import get_config
from softwarecenter.distro import get_distro, get_current_arch
from softwarecenter.backend.login_sso import get_sso_backend
from softwarecenter.backend.reviews import Review
from softwarecenter.db.database import Application
from softwarecenter.gwibber_helper import GwibberHelper, GwibberHelperMock
from softwarecenter.i18n import get_language
from softwarecenter.ui.gtk3.SimpleGtkbuilderApp import SimpleGtkbuilderApp
from softwarecenter.ui.gtk3.dialogs import SimpleGtkbuilderDialog
from softwarecenter.ui.gtk3.widgets.stars import ReactiveStar
from softwarecenter.utils import make_string_from_list, utf8
from softwarecenter.backend.piston.rnrclient import RatingsAndReviewsAPI
from softwarecenter.backend.piston.rnrclient_pristine import ReviewRequest
# get current distro and set default server root
distro = get_distro()
SERVER_ROOT = distro.REVIEWS_SERVER
# server status URL
SERVER_STATUS_URL = SERVER_ROOT + "/server-status/"
class UserCancelException(Exception):
""" user pressed cancel """
pass
TRANSMIT_STATE_NONE = "transmit-state-none"
TRANSMIT_STATE_INPROGRESS = "transmit-state-inprogress"
TRANSMIT_STATE_DONE = "transmit-state-done"
TRANSMIT_STATE_ERROR = "transmit-state-error"
class GRatingsAndReviews(GObject.GObject):
""" Access ratings&reviews API as a gobject """
__gsignals__ = {
# send when a transmit is started
"transmit-start": (GObject.SIGNAL_RUN_LAST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, ),
),
# send when a transmit was successful
"transmit-success": (GObject.SIGNAL_RUN_LAST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, ),
),
# send when a transmit failed
"transmit-failure": (GObject.SIGNAL_RUN_LAST,
GObject.TYPE_NONE,
(GObject.TYPE_PYOBJECT, str),
),
}
def __init__(self, token):
super(GRatingsAndReviews, self).__init__()
# piston worker thread
self.worker_thread = Worker(token)
self.worker_thread.start()
GObject.timeout_add(500,
self._check_thread_status,
None)
def submit_review(self, review):
self.emit("transmit-start", review)
self.worker_thread.pending_reviews.put(review)
def report_abuse(self, review_id, summary, text):
self.emit("transmit-start", review_id)
self.worker_thread.pending_reports.put((int(review_id), summary, text))
def submit_usefulness(self, review_id, is_useful):
self.emit("transmit-start", review_id)
self.worker_thread.pending_usefulness.put((int(review_id), is_useful))
def modify_review(self, review_id, review):
self.emit("transmit-start", review_id)
self.worker_thread.pending_modify.put((int(review_id), review))
def delete_review(self, review_id):
self.emit("transmit-start", review_id)
self.worker_thread.pending_delete.put(int(review_id))
def server_status(self):
self.worker_thread.pending_server_status()
def shutdown(self):
self.worker_thread.shutdown()
# internal
def _check_thread_status(self, data):
if self.worker_thread._transmit_state == TRANSMIT_STATE_DONE:
self.emit("transmit-success", "")
self.worker_thread._transmit_state = TRANSMIT_STATE_NONE
elif self.worker_thread._transmit_state == TRANSMIT_STATE_ERROR:
self.emit("transmit-failure", "",
self.worker_thread._transmit_error_str)
self.worker_thread._transmit_state = TRANSMIT_STATE_NONE
return True
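# A minimal sketch of driving GRatingsAndReviews from a running GLib/Gtk
# main loop (illustrative only; `token` must be a dict with the four OAuth
# keys consumed by Worker below, and `review` a Review instance):
#
#   api = GRatingsAndReviews(token)
#   api.connect("transmit-success", lambda api, trans: print("sent"))
#   api.connect("transmit-failure",
#               lambda api, trans, err: print("failed:", err))
#   api.submit_review(review)
#   # ...the 500 ms _check_thread_status timeout polls the worker thread
#   # and emits the signals above once the request completes.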
class Worker(threading.Thread):
def __init__(self, token):
# init parent
threading.Thread.__init__(self)
self.pending_reviews = Queue()
self.pending_reports = Queue()
self.pending_usefulness = Queue()
self.pending_modify = Queue()
self.pending_delete = Queue()
self.pending_server_status = Queue()
self._shutdown = False
# FIXME: instead of a binary value we need the state associated
# with each request from the queue
self._transmit_state = TRANSMIT_STATE_NONE
self._transmit_error_str = ""
self.display_name = "No display name"
auth = piston_mini_client.auth.OAuthAuthorizer(token["token"],
token["token_secret"], token["consumer_key"],
token["consumer_secret"])
# change default server to the SSL one
distro = get_distro()
service_root = distro.REVIEWS_SERVER
self.rnrclient = RatingsAndReviewsAPI(service_root=service_root,
auth=auth)
def run(self):
"""Main thread run interface, logs into launchpad and waits
for commands
"""
logging.debug("worker thread run")
# loop
self._wait_for_commands()
def shutdown(self):
"""Request shutdown"""
self._shutdown = True
def _wait_for_commands(self):
"""internal helper that waits for commands"""
while True:
#logging.debug("worker: _wait_for_commands")
self._submit_reviews_if_pending()
self._submit_reports_if_pending()
self._submit_usefulness_if_pending()
self._submit_modify_if_pending()
self._submit_delete_if_pending()
time.sleep(0.2)
if (self._shutdown and
self.pending_reviews.empty() and
self.pending_usefulness.empty() and
self.pending_reports.empty() and
self.pending_modify.empty() and
self.pending_delete.empty()):
return
def _submit_usefulness_if_pending(self):
""" the actual usefulness function """
while not self.pending_usefulness.empty():
logging.debug("POST usefulness")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
(review_id, is_useful) = self.pending_usefulness.get()
try:
res = self.rnrclient.submit_usefulness(
review_id=review_id, useful=str(is_useful))
self._transmit_state = TRANSMIT_STATE_DONE
sys.stdout.write(json.dumps(res))
except Exception as e:
logging.exception("submit_usefulness failed")
err_str = self._get_error_messages(e)
self._transmit_error_str = err_str
self._write_exception_html_log_if_needed(e)
self._transmit_state = TRANSMIT_STATE_ERROR
self.pending_usefulness.task_done()
def _submit_modify_if_pending(self):
""" the actual modify function """
while not self.pending_modify.empty():
logging.debug("_modify_review")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
(review_id, review) = self.pending_modify.get()
summary = review['summary']
review_text = review['review_text']
rating = review['rating']
try:
res = self.rnrclient.modify_review(review_id=review_id,
summary=summary,
review_text=review_text,
rating=rating)
self._transmit_state = TRANSMIT_STATE_DONE
sys.stdout.write(json.dumps(vars(res)))
except Exception as e:
logging.exception("modify_review")
err_str = self._get_error_messages(e)
self._write_exception_html_log_if_needed(e)
self._transmit_state = TRANSMIT_STATE_ERROR
self._transmit_error_str = err_str
self.pending_modify.task_done()
def _submit_delete_if_pending(self):
""" the actual deletion """
while not self.pending_delete.empty():
logging.debug("POST delete")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
review_id = self.pending_delete.get()
try:
res = self.rnrclient.delete_review(review_id=review_id)
self._transmit_state = TRANSMIT_STATE_DONE
sys.stdout.write(json.dumps(res))
except Exception as e:
logging.exception("delete_review failed")
self._write_exception_html_log_if_needed(e)
self._transmit_error_str = _("Failed to delete review")
self._transmit_state = TRANSMIT_STATE_ERROR
self.pending_delete.task_done()
def _submit_reports_if_pending(self):
""" the actual report function """
while not self.pending_reports.empty():
logging.debug("POST report")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
(review_id, summary, text) = self.pending_reports.get()
try:
res = self.rnrclient.flag_review(review_id=review_id,
reason=summary,
text=text)
self._transmit_state = TRANSMIT_STATE_DONE
sys.stdout.write(json.dumps(res))
except Exception as e:
logging.exception("flag_review failed")
err_str = self._get_error_messages(e)
self._transmit_error_str = err_str
self._write_exception_html_log_if_needed(e)
self._transmit_state = TRANSMIT_STATE_ERROR
self.pending_reports.task_done()
def _write_exception_html_log_if_needed(self, e):
# write out a "oops.html"
if type(e) is piston_mini_client.APIError:
f = tempfile.NamedTemporaryFile(
prefix="sc_submit_oops_", suffix=".html", delete=False)
            # newer piston-mini-client exposes only the body of the returned
            # data; older versions just pushed everything into one big string
if hasattr(e, "body") and e.body:
f.write(e.body)
else:
f.write(str(e))
# reviews
def queue_review(self, review):
""" queue a new review for sending to LP """
logging.debug("queue_review %s" % review)
self.pending_reviews.put(review)
def _submit_reviews_if_pending(self):
""" the actual submit function """
while not self.pending_reviews.empty():
logging.debug("_submit_review")
self._transmit_state = TRANSMIT_STATE_INPROGRESS
review = self.pending_reviews.get()
piston_review = ReviewRequest()
piston_review.package_name = review.app.pkgname
piston_review.app_name = review.app.appname
piston_review.summary = review.summary
piston_review.version = review.package_version
piston_review.review_text = review.text
piston_review.date = str(review.date)
piston_review.rating = review.rating
piston_review.language = review.language
piston_review.arch_tag = get_current_arch()
piston_review.origin = review.origin
piston_review.distroseries = distro.get_codename()
try:
res = self.rnrclient.submit_review(review=piston_review)
self._transmit_state = TRANSMIT_STATE_DONE
# output the resulting ReviewDetails object as json so
# that the parent can read it
sys.stdout.write(json.dumps(vars(res)))
except Exception as e:
logging.exception("submit_review")
err_str = self._get_error_messages(e)
self._write_exception_html_log_if_needed(e)
self._transmit_state = TRANSMIT_STATE_ERROR
self._transmit_error_str = err_str
self.pending_reviews.task_done()
def _get_error_messages(self, e):
if type(e) is piston_mini_client.APIError:
try:
logging.warning(e.body)
error_msg = json.loads(e.body)['errors']
errs = error_msg["__all__"]
err_str = _("Server's response was:")
for err in errs:
err_str = _("%s\n%s") % (err_str, err)
            except Exception:
err_str = _("Unknown error communicating with server. "
"Check your log and consider raising a bug report "
"if this problem persists")
logging.warning(e)
else:
err_str = _("Unknown error communicating with server. Check "
"your log and consider raising a bug report if this "
"problem persists")
logging.warning(e)
return err_str
def verify_server_status(self):
""" verify that the server we want to talk to can be reached
this method should be overriden if clients talk to a different
server than rnr
"""
try:
            resp = urlopen(SERVER_STATUS_URL).read()
            # urlopen returns bytes on py3 and str on py2; normalize before
            # comparing, otherwise the py3 comparison is never equal
            if resp.decode("utf-8", "replace").strip() != "ok":
return False
except Exception as e:
logging.error("exception from '%s': '%s'" % (SERVER_STATUS_URL, e))
return False
return True
class BaseApp(SimpleGtkbuilderApp):
def __init__(self, datadir, uifile):
SimpleGtkbuilderApp.__init__(
self, os.path.join(datadir, "ui/gtk3", uifile), "software-center")
# generic data
self.token = None
self.display_name = None
self._login_successful = False
self._whoami_token_reset_nr = 0
#persistent config
configfile = os.path.join(
SOFTWARE_CENTER_CONFIG_DIR, "submit_reviews.cfg")
self.config = get_config(configfile)
# status spinner
self.status_spinner = Gtk.Spinner()
self.status_spinner.set_size_request(32, 32)
self.login_spinner_vbox.pack_start(self.status_spinner, False, False,
0)
self.login_spinner_vbox.reorder_child(self.status_spinner, 0)
self.status_spinner.show()
#submit status spinner
self.submit_spinner = Gtk.Spinner()
self.submit_spinner.set_size_request(*Gtk.icon_size_lookup(
Gtk.IconSize.SMALL_TOOLBAR)[:2])
#submit error image
self.submit_error_img = Gtk.Image()
self.submit_error_img.set_from_stock(Gtk.STOCK_DIALOG_ERROR,
Gtk.IconSize.SMALL_TOOLBAR)
#submit success image
self.submit_success_img = Gtk.Image()
self.submit_success_img.set_from_stock(Gtk.STOCK_APPLY,
Gtk.IconSize.SMALL_TOOLBAR)
#submit warn image
self.submit_warn_img = Gtk.Image()
self.submit_warn_img.set_from_stock(Gtk.STOCK_DIALOG_INFO,
Gtk.IconSize.SMALL_TOOLBAR)
#label size to prevent image or spinner from resizing
self.label_transmit_status.set_size_request(-1,
Gtk.icon_size_lookup(Gtk.IconSize.SMALL_TOOLBAR)[1])
def _get_parent_xid_for_login_window(self):
# no get_xid() yet in gir world
#return self.submit_window.get_window().get_xid()
return ""
def run(self):
# initially display a 'Connecting...' page
self.main_notebook.set_current_page(0)
self.login_status_label.set_markup(_(u"Signing in\u2026"))
self.status_spinner.start()
self.submit_window.show()
# now run the loop
self.login()
def quit(self, exitcode=0):
sys.exit(exitcode)
def _add_spellcheck_to_textview(self, textview):
""" adds a spellchecker (if available) to the given Gtk.textview """
pass
#~ try:
#~ import gtkspell
#~ # mvo: gtkspell.get_from_text_view() is broken, so we use this
#~ # method instead, the second argument is the language to
#~ # use (that is directly passed to pspell)
#~ spell = gtkspell.Spell(textview, None)
#~ except:
#~ return
#~ return spell
def login(self, show_register=True):
logging.debug("login()")
login_window_xid = self._get_parent_xid_for_login_window()
help_text = _("To review software or to report abuse you need to "
"sign in to a Ubuntu Single Sign-On account.")
self.sso = get_sso_backend(login_window_xid,
SOFTWARE_CENTER_NAME_KEYRING, help_text)
self.sso.connect("login-successful", self._maybe_login_successful)
self.sso.connect("login-canceled", self._login_canceled)
if show_register:
self.sso.login_or_register()
else:
self.sso.login()
def _login_canceled(self, sso):
self.status_spinner.hide()
self.login_status_label.set_markup(
'<b><big>%s</big></b>' % _("Login was canceled"))
def _maybe_login_successful(self, sso, oauth_result):
"""called after we have the token, then we go and figure out our
name
"""
logging.debug("_maybe_login_successful")
self.token = oauth_result
self.ssoapi = get_ubuntu_sso_backend()
self.ssoapi.connect("whoami", self._whoami_done)
self.ssoapi.connect("error", self._whoami_error)
        # this will automatically verify the token and retrigger login
        # if it's expired
self.ssoapi.whoami()
def _whoami_done(self, ssologin, result):
logging.debug("_whoami_done")
self.display_name = result["displayname"]
self._create_gratings_api()
self.login_successful(self.display_name)
def _whoami_error(self, ssologin, e):
logging.error("whoami error '%s'" % e)
# show error
self.status_spinner.hide()
self.login_status_label.set_markup(
'<b><big>%s</big></b>' % _("Failed to log in"))
def login_successful(self, display_name):
""" callback when the login was successful """
pass
def on_button_cancel_clicked(self, button=None):
# bring it down gracefully
if hasattr(self, "api"):
self.api.shutdown()
while Gtk.events_pending():
Gtk.main_iteration()
self.quit(1)
def _create_gratings_api(self):
self.api = GRatingsAndReviews(self.token)
self.api.connect("transmit-start", self.on_transmit_start)
self.api.connect("transmit-success", self.on_transmit_success)
self.api.connect("transmit-failure", self.on_transmit_failure)
def on_transmit_start(self, api, trans):
self.button_post.set_sensitive(False)
self.button_cancel.set_sensitive(False)
self._change_status("progress", _(self.SUBMIT_MESSAGE))
def on_transmit_success(self, api, trans):
self.api.shutdown()
self.quit()
def on_transmit_failure(self, api, trans, error):
self._change_status("fail", error)
self.button_post.set_sensitive(True)
self.button_cancel.set_sensitive(True)
def _change_status(self, type, message):
"""method to separate the updating of status icon/spinner and
message in the submit review window, takes a type (progress,
fail, success, clear, warning) as a string and a message
string then updates status area accordingly
"""
self._clear_status_imagery()
self.label_transmit_status.set_text("")
if type == "progress":
self.status_hbox.pack_start(self.submit_spinner, False, False, 0)
self.status_hbox.reorder_child(self.submit_spinner, 0)
self.submit_spinner.show()
self.submit_spinner.start()
self.label_transmit_status.set_text(message)
elif type == "fail":
self.status_hbox.pack_start(self.submit_error_img, False, False, 0)
self.status_hbox.reorder_child(self.submit_error_img, 0)
self.submit_error_img.show()
self.label_transmit_status.set_text(_(self.FAILURE_MESSAGE))
self.error_textview.get_buffer().set_text(_(message))
self.detail_expander.show()
elif type == "success":
self.status_hbox.pack_start(self.submit_success_img, False, False,
0)
self.status_hbox.reorder_child(self.submit_success_img, 0)
self.submit_success_img.show()
self.label_transmit_status.set_text(message)
elif type == "warning":
self.status_hbox.pack_start(self.submit_warn_img, False, False, 0)
self.status_hbox.reorder_child(self.submit_warn_img, 0)
self.submit_warn_img.show()
self.label_transmit_status.set_text(message)
def _clear_status_imagery(self):
self.detail_expander.hide()
self.detail_expander.set_expanded(False)
#clears spinner or error image from dialog submission label
# before trying to display one or the other
if self.submit_spinner.get_parent():
self.status_hbox.remove(self.submit_spinner)
if self.submit_error_img.get_window():
self.status_hbox.remove(self.submit_error_img)
if self.submit_success_img.get_window():
self.status_hbox.remove(self.submit_success_img)
if self.submit_warn_img.get_window():
self.status_hbox.remove(self.submit_warn_img)
class SubmitReviewsApp(BaseApp):
""" review a given application or package """
STAR_SIZE = (32, 32)
APP_ICON_SIZE = 48
#character limits for text boxes and hurdles for indicator changes
# (overall field maximum, limit to display warning, limit to change
# colour)
SUMMARY_CHAR_LIMITS = (80, 60, 70)
REVIEW_CHAR_LIMITS = (5000, 4900, 4950)
#alert colours for character warning labels
NORMAL_COLOUR = "000000"
ERROR_COLOUR = "FF0000"
SUBMIT_MESSAGE = _("Submitting Review")
FAILURE_MESSAGE = _("Failed to submit review")
SUCCESS_MESSAGE = _("Review submitted")
def __init__(self, app, version, iconname, origin, parent_xid, datadir,
action="submit", review_id=0):
BaseApp.__init__(self, datadir, "submit_review.ui")
self.datadir = datadir
# legal fineprint, do not change without consulting a lawyer
msg = _("By submitting this review, you agree not to include "
"anything defamatory, infringing, or illegal. Canonical "
"may, at its discretion, publish your name and review in "
"Ubuntu Software Center and elsewhere, and allow the "
"software or content author to publish it too.")
self.label_legal_fineprint.set_markup(
'<span size="x-small">%s</span>' % msg)
# additional icons come from app-install-data
self.icons = Gtk.IconTheme.get_default()
self.icons.append_search_path("/usr/share/app-install/icons/")
self.submit_window.connect("destroy", self.on_button_cancel_clicked)
self._add_spellcheck_to_textview(self.textview_review)
self.star_rating = ReactiveStar()
alignment = Gtk.Alignment.new(0.0, 0.5, 1.0, 1.0)
alignment.set_padding(3, 3, 3, 3)
alignment.add(self.star_rating)
self.star_rating.set_size_as_pixel_value(36)
self.star_caption = Gtk.Label()
alignment.show_all()
self.rating_hbox.pack_start(alignment, True, True, 0)
self.rating_hbox.reorder_child(alignment, 0)
self.rating_hbox.pack_start(self.star_caption, False, False, 0)
self.rating_hbox.reorder_child(self.star_caption, 1)
self.review_buffer = self.textview_review.get_buffer()
self.detail_expander.hide()
self.retrieve_api = RatingsAndReviewsAPI()
# data
self.app = app
self.version = version
self.origin = origin
self.iconname = iconname
self.action = action
self.review_id = int(review_id)
# parent xid
#~ if parent_xid:
#~ win = Gdk.Window.foreign_new(int(parent_xid))
#~ wnck_get_xid_from_pid(os.getpid())
#~ win = ''
#~ self.review_buffer.set_text(str(win))
#~ if win:
#~ self.submit_window.realize()
#~ self.submit_window.get_window().set_transient_for(win)
self.submit_window.set_position(Gtk.WindowPosition.MOUSE)
self._confirm_cancel_yes_handler = 0
self._confirm_cancel_no_handler = 0
self._displaying_cancel_confirmation = False
self.submit_window.connect("key-press-event", self._on_key_press_event)
self.review_summary_entry.connect('changed',
self._on_mandatory_text_entry_changed)
self.star_rating.connect('changed', self._on_mandatory_fields_changed)
self.review_buffer.connect('changed', self._on_text_entry_changed)
# gwibber stuff
self.gwibber_combo = Gtk.ComboBoxText.new()
#cells = self.gwibber_combo.get_cells()
#cells[0].set_property("ellipsize", pango.ELLIPSIZE_END)
self.gwibber_hbox.pack_start(self.gwibber_combo, True, True, 0)
if "SOFTWARE_CENTER_GWIBBER_MOCK_USERS" in os.environ:
self.gwibber_helper = GwibberHelperMock()
else:
self.gwibber_helper = GwibberHelper()
# get a dict with a saved gwibber_send (boolean) and gwibber
# account_id for persistent state
self.gwibber_prefs = self._get_gwibber_prefs()
# gwibber stuff
self._setup_gwibber_gui()
#now setup rest of app based on whether submit or modify
if self.action == "submit":
self._init_submit()
elif self.action == "modify":
self._init_modify()
def _init_submit(self):
self.submit_window.set_title(_("Review %s") %
gettext.dgettext("app-install-data", self.app.name))
def _init_modify(self):
self._populate_review()
self.submit_window.set_title(_("Modify Your %(appname)s Review") % {
'appname': gettext.dgettext("app-install-data", self.app.name)})
self.button_post.set_label(_("Modify"))
self.SUBMIT_MESSAGE = _("Updating your review")
self.FAILURE_MESSAGE = _("Failed to edit review")
self.SUCCESS_MESSAGE = _("Review updated")
self._enable_or_disable_post_button()
def _populate_review(self):
try:
review_data = self.retrieve_api.get_review(
review_id=self.review_id)
app = Application(appname=review_data.app_name,
pkgname=review_data.package_name)
self.app = app
self.review_summary_entry.set_text(review_data.summary)
self.star_rating.set_rating(review_data.rating)
self.review_buffer.set_text(review_data.review_text)
# save original review field data, for comparison purposes when
# user makes changes to fields
self.orig_summary_text = review_data.summary
self.orig_star_rating = review_data.rating
self.orig_review_text = review_data.review_text
self.version = review_data.version
self.origin = review_data.origin
except piston_mini_client.APIError:
logging.warn(
'Unable to retrieve review id %s for editing. Exiting' %
self.review_id)
self.quit(2)
def _setup_details(self, widget, app, iconname, version, display_name):
# icon shazam
try:
icon = self.icons.load_icon(iconname, self.APP_ICON_SIZE, 0)
        except Exception:
icon = self.icons.load_icon(Icons.MISSING_APP, self.APP_ICON_SIZE,
0)
self.review_appicon.set_from_pixbuf(icon)
# title
app = utf8(gettext.dgettext("app-install-data", app.name))
version = utf8(version)
self.review_title.set_markup(
'<b><span size="x-large">%s</span></b>\n%s' % (app, version))
# review label
self.review_label.set_markup(_('Review by: %s') %
display_name.encode('utf8'))
# review summary label
self.review_summary_label.set_markup(_('Summary:'))
#rating label
self.rating_label.set_markup(_('Rating:'))
#error detail link label
self.label_expander.set_markup('<small><u>%s</u></small>' %
(_('Error Details')))
def _has_user_started_reviewing(self):
summary_chars = self.review_summary_entry.get_text_length()
review_chars = self.review_buffer.get_char_count()
return summary_chars > 0 or review_chars > 0
def _on_mandatory_fields_changed(self, *args):
self._enable_or_disable_post_button()
def _on_mandatory_text_entry_changed(self, widget):
self._check_summary_character_count()
self._on_mandatory_fields_changed(widget)
def _on_text_entry_changed(self, widget):
self._check_review_character_count()
self._on_mandatory_fields_changed(widget)
def _enable_or_disable_post_button(self):
summary_chars = self.review_summary_entry.get_text_length()
review_chars = self.review_buffer.get_char_count()
if (summary_chars and summary_chars <= self.SUMMARY_CHAR_LIMITS[0] and
review_chars and review_chars <= self.REVIEW_CHAR_LIMITS[0] and
int(self.star_rating.get_rating()) > 0):
self.button_post.set_sensitive(True)
self._change_status("clear", "")
else:
self.button_post.set_sensitive(False)
self._change_status("clear", "")
        # Set the post button insensitive if the review being modified is
        # the same as what is currently in the UI fields. Check that the
        # 'original' review attributes exist, to avoid exceptions when this
        # method is called before the review has been retrieved.
if self.action == 'modify' and hasattr(self, "orig_star_rating"):
if self._modify_review_is_the_same():
self.button_post.set_sensitive(False)
self._change_status("warning", _("Can't submit unmodified"))
else:
self._change_status("clear", "")
def _modify_review_is_the_same(self):
"""checks if review fields are the same as the review being modified
and returns True if so
"""
# perform an initial check on character counts to return False if any
# don't match, avoids doing unnecessary string comparisons
if (self.review_summary_entry.get_text_length() !=
len(self.orig_summary_text) or
self.review_buffer.get_char_count() != len(self.orig_review_text)):
return False
#compare rating
if self.star_rating.get_rating() != self.orig_star_rating:
return False
#compare summary text
if (self.review_summary_entry.get_text().decode('utf-8') !=
self.orig_summary_text):
return False
#compare review text
if (self.review_buffer.get_text(
self.review_buffer.get_start_iter(),
self.review_buffer.get_end_iter(),
include_hidden_chars=False).decode('utf-8') !=
self.orig_review_text):
return False
return True
def _check_summary_character_count(self):
summary_chars = self.review_summary_entry.get_text_length()
if summary_chars > self.SUMMARY_CHAR_LIMITS[1] - 1:
markup = self._get_fade_colour_markup(
self.NORMAL_COLOUR, self.ERROR_COLOUR,
self.SUMMARY_CHAR_LIMITS[2], self.SUMMARY_CHAR_LIMITS[0],
summary_chars)
self.summary_char_label.set_markup(markup)
else:
self.summary_char_label.set_text('')
def _check_review_character_count(self):
review_chars = self.review_buffer.get_char_count()
if review_chars > self.REVIEW_CHAR_LIMITS[1] - 1:
markup = self._get_fade_colour_markup(
self.NORMAL_COLOUR, self.ERROR_COLOUR,
self.REVIEW_CHAR_LIMITS[2], self.REVIEW_CHAR_LIMITS[0],
review_chars)
self.review_char_label.set_markup(markup)
else:
self.review_char_label.set_text('')
def _get_fade_colour_markup(self, full_col, empty_col, cmin, cmax, curr):
"""takes two colours as well as a minimum and maximum value then
fades one colour into the other based on the proportion of the
current value between the min and max
returns a pango color string
"""
markup = '<span fgcolor="#%s">%s</span>'
if curr > cmax:
return markup % (empty_col, str(cmax - curr))
elif curr <= cmin: # saves division by 0 later if cmin == cmax
return markup % (full_col, str(cmax - curr))
else:
#distance between min and max values to fade colours
scale = cmax - cmin
#percentage to fade colour by, based on current number of chars
percentage = (curr - cmin) / float(scale)
full_rgb = self._convert_html_to_rgb(full_col)
empty_rgb = self._convert_html_to_rgb(empty_col)
#calc changes to each of the r g b values to get the faded colour
red_change = full_rgb[0] - empty_rgb[0]
green_change = full_rgb[1] - empty_rgb[1]
blue_change = full_rgb[2] - empty_rgb[2]
new_red = int(full_rgb[0] - (percentage * red_change))
new_green = int(full_rgb[1] - (percentage * green_change))
new_blue = int(full_rgb[2] - (percentage * blue_change))
return_color = self._convert_rgb_to_html(new_red, new_green,
new_blue)
return markup % (return_color, str(cmax - curr))
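    # Worked example of the fade: with full_col "000000", empty_col
    # "FF0000", cmin=70, cmax=80 and curr=75, percentage is 0.5, so the
    # red channel fades from 0x00 halfway towards 0xFF, giving
    # '<span fgcolor="#7F0000">5</span>' (5 characters remaining).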
def _convert_html_to_rgb(self, html):
r = html[0:2]
g = html[2:4]
b = html[4:6]
return (int(r, 16), int(g, 16), int(b, 16))
def _convert_rgb_to_html(self, r, g, b):
return "%s%s%s" % ("%02X" % r,
"%02X" % g,
"%02X" % b)
def on_button_post_clicked(self, button):
logging.debug("enter_review ok button")
review = Review(self.app)
text_buffer = self.textview_review.get_buffer()
review.text = text_buffer.get_text(text_buffer.get_start_iter(),
text_buffer.get_end_iter(),
False) # include_hidden_chars
review.summary = self.review_summary_entry.get_text()
review.date = datetime.datetime.now()
review.language = get_language()
review.rating = int(self.star_rating.get_rating())
review.package_version = self.version
review.origin = self.origin
if self.action == "submit":
self.api.submit_review(review)
elif self.action == "modify":
changes = {'review_text': review.text,
'summary': review.summary,
'rating': review.rating}
self.api.modify_review(self.review_id, changes)
def login_successful(self, display_name):
self.main_notebook.set_current_page(1)
self._setup_details(self.submit_window, self.app,
self.iconname, self.version, display_name)
self.textview_review.grab_focus()
def _setup_gwibber_gui(self):
self.gwibber_accounts = self.gwibber_helper.accounts()
list_length = len(self.gwibber_accounts)
if list_length == 0:
self._on_no_gwibber_accounts()
elif list_length == 1:
self._on_one_gwibber_account()
else:
self._on_multiple_gwibber_accounts()
def _get_gwibber_prefs(self):
if self.config.has_option("reviews", "gwibber_send"):
send = self.config.getboolean("reviews", "gwibber_send")
else:
send = False
if self.config.has_option("reviews", "account_id"):
account_id = self.config.get("reviews", "account_id")
else:
account_id = False
return {
"gwibber_send": send,
"account_id": account_id
}
def _on_no_gwibber_accounts(self):
self.gwibber_hbox.hide()
self.gwibber_checkbutton.set_active(False)
def _on_one_gwibber_account(self):
account = self.gwibber_accounts[0]
self.gwibber_hbox.show()
self.gwibber_combo.hide()
from softwarecenter.utils import utf8
acct_text = utf8(_("Also post this review to %s (@%s)")) % (
utf8(account['service'].capitalize()), utf8(account['username']))
self.gwibber_checkbutton.set_label(acct_text)
        # simplifies on_transmit_success later
self.gwibber_combo.append_text(acct_text)
self.gwibber_combo.set_active(0)
# auto select submit via gwibber checkbutton if saved prefs say True
self.gwibber_checkbutton.set_active(self.gwibber_prefs['gwibber_send'])
def _on_multiple_gwibber_accounts(self):
self.gwibber_hbox.show()
self.gwibber_combo.show()
# setup accounts combo
self.gwibber_checkbutton.set_label(_("Also post this review to: "))
for account in self.gwibber_accounts:
acct_text = "%s (@%s)" % (
account['service'].capitalize(), account['username'])
self.gwibber_combo.append_text(acct_text)
# add "all" to both combo and accounts (the later is only pseudo)
self.gwibber_combo.append_text(_("All my Gwibber services"))
self.gwibber_accounts.append({"id": "pseudo-sc-all"})
# reapply preferences
self.gwibber_checkbutton.set_active(self.gwibber_prefs['gwibber_send'])
gwibber_active_account = 0
for account in self.gwibber_accounts:
if account['id'] == self.gwibber_prefs['account_id']:
gwibber_active_account = self.gwibber_accounts.index(account)
self.gwibber_combo.set_active(gwibber_active_account)
def _post_to_one_gwibber_account(self, msg, account):
""" little helper to facilitate posting message to twitter account
passed in
"""
status_text = _("Posting to %s") % utf8(
account['service'].capitalize())
self._change_status("progress", status_text)
return self.gwibber_helper.send_message(msg, account['id'])
def on_transmit_success(self, api, trans):
"""on successful submission of a review, try to send to gwibber as
well
"""
self._run_gwibber_submits(api, trans)
def _on_key_press_event(self, widget, event):
if event.keyval == Gdk.KEY_Escape:
self._confirm_cancellation()
def _confirm_cancellation(self):
if (self._has_user_started_reviewing() and not
self._displaying_cancel_confirmation):
def do_cancel(widget):
self.submit_window.destroy()
self.quit()
def undo_cancel(widget):
self._displaying_cancel_confirmation = False
self.response_hbuttonbox.set_visible(True)
self.main_notebook.set_current_page(1)
self.response_hbuttonbox.set_visible(False)
self.confirm_cancel_yes.grab_focus()
self.main_notebook.set_current_page(2)
self._displaying_cancel_confirmation = True
if not self._confirm_cancel_yes_handler:
tag = self.confirm_cancel_yes.connect("clicked", do_cancel)
self._confirm_cancel_yes_handler = tag
if not self._confirm_cancel_no_handler:
tag = self.confirm_cancel_no.connect("clicked", undo_cancel)
self._confirm_cancel_no_handler = tag
else:
self.submit_window.destroy()
self.quit()
def _get_send_accounts(self, sel_index):
"""return the account referenced by the passed in index, or all
accounts if the index of the combo points to the pseudo-sc-all
string
"""
if self.gwibber_accounts[sel_index]["id"] == "pseudo-sc-all":
return self.gwibber_accounts
else:
return [self.gwibber_accounts[sel_index]]
def _submit_to_gwibber(self, msg, send_accounts):
"""for each send_account passed in, try to submit to gwibber
then return a list of accounts that failed to submit (empty list
if all succeeded)
"""
#list of gwibber accounts that failed to submit, used later to allow
# selective re-send if user desires
failed_accounts = []
for account in send_accounts:
if account["id"] != "pseudo-sc-all":
if not self._post_to_one_gwibber_account(msg, account):
failed_accounts.append(account)
return failed_accounts
def _run_gwibber_submits(self, api, trans):
"""check if gwibber send should occur and send via gwibber if so"""
gwibber_success = True
using_gwibber = self.gwibber_checkbutton.get_active()
if using_gwibber:
i = self.gwibber_combo.get_active()
msg = (self._gwibber_message())
send_accounts = self._get_send_accounts(i)
self._save_gwibber_state(True, self.gwibber_accounts[i]['id'])
#tries to send to gwibber, and gets back any failed accounts
failed_accounts = self._submit_to_gwibber(msg, send_accounts)
if len(failed_accounts) > 0:
gwibber_success = False
#FIXME: send an error string to this method instead of empty
# string
self._on_gwibber_fail(api, trans, failed_accounts, "")
else:
# prevent _save_gwibber_state from overwriting the account id
# in config if the checkbutton was not selected
self._save_gwibber_state(False, None)
# run parent handler on gwibber success, otherwise this will be dealt
# with in _on_gwibber_fail
if gwibber_success:
self._success_status()
BaseApp.on_transmit_success(self, api, trans)
def _gwibber_retry_some(self, api, trans, accounts):
""" perform selective retrying of gwibber posting, using only
accounts passed in
"""
gwibber_success = True
failed_accounts = []
msg = (self._gwibber_message())
for account in accounts:
if not self._post_to_one_gwibber_account(msg, account):
failed_accounts.append(account)
gwibber_success = False
if not gwibber_success:
#FIXME: send an error string to this method instead of empty string
self._on_gwibber_fail(api, trans, failed_accounts, "")
else:
self._success_status()
BaseApp.on_transmit_success(self, api, trans)
def _success_status(self):
"""Updates status area to show success for 2 seconds then allows
window to proceed
"""
self._change_status("success", _(self.SUCCESS_MESSAGE))
while Gtk.events_pending():
Gtk.main_iteration()
time.sleep(2)
def _on_gwibber_fail(self, api, trans, failed_accounts, error):
self._change_status("fail", _("Problems posting to Gwibber"))
#list to hold service strings in the format: "Service (@username)"
failed_services = []
for account in failed_accounts:
failed_services.append("%s (@%s)" % (
account['service'].capitalize(), account['username']))
glade_dialog = SimpleGtkbuilderDialog(self.datadir,
domain="software-center")
dialog = glade_dialog.dialog_gwibber_error
dialog.set_transient_for(self.submit_window)
# build the failure string
# TRANSLATORS: the part in %s can either be a single entry
# like "facebook" or a string like
# "factbook and twister"
error_str = gettext.ngettext(
"There was a problem posting this review to %s.",
"There was a problem posting this review to %s.",
len(failed_services))
error_str = make_string_from_list(error_str, failed_services)
dialog.set_markup(error_str)
dialog.format_secondary_text(error)
result = dialog.run()
dialog.destroy()
        if result == Gtk.ResponseType.ACCEPT:
self._gwibber_retry_some(api, trans, failed_accounts)
else:
BaseApp.on_transmit_success(self, api, trans)
def _save_gwibber_state(self, gwibber_send, account_id):
if not self.config.has_section("reviews"):
self.config.add_section("reviews")
self.config.set("reviews", "gwibber_send", str(gwibber_send))
if account_id:
self.config.set("reviews", "account_id", account_id)
self.config.write()
def _gwibber_message(self, max_len=140):
""" build a gwibber message of max_len"""
def _gwibber_message_string_from_data(appname, rating, summary, link):
""" helper so that we do not duplicate the "reviewed..." string """
return _("reviewed %(appname)s in Ubuntu: %(rating)s "
"%(summary)s %(link)s") % {
'appname': appname,
'rating': rating,
'summary': summary,
'link': link}
rating = self.star_rating.get_rating()
rating_string = ''
#fill star ratings for string
for i in range(1, 6):
if i <= rating:
rating_string = rating_string + u"\u2605"
else:
rating_string = rating_string + u"\u2606"
review_summary_text = self.review_summary_entry.get_text()
        # FIXME: currently the link is not useful (at all) for most
        # people not running ubuntu
#app_link = "http://apt.ubuntu.com/p/%s" % self.app.pkgname
app_link = ""
gwib_msg = _gwibber_message_string_from_data(
self.app.name, rating_string, review_summary_text, app_link)
        #check char count and ellipsize review summary if the message
        # exceeds max_len chars
if len(gwib_msg) > max_len:
chars_to_reduce = len(gwib_msg) - (max_len - 1)
new_char_count = len(review_summary_text) - chars_to_reduce
review_summary_text = (review_summary_text[:new_char_count] +
u"\u2026")
gwib_msg = _gwibber_message_string_from_data(
self.app.name, rating_string, review_summary_text, app_link)
return gwib_msg
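    # Example of the truncation above: if the assembled message is 150
    # characters with max_len=140, chars_to_reduce is 11, so the summary
    # loses its last 11 characters and gains a single U+2026 ellipsis,
    # bringing the rebuilt message to exactly 140 characters.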
class ReportReviewApp(BaseApp):
""" report a given application or package """
APP_ICON_SIZE = 48
SUBMIT_MESSAGE = _(u"Sending report\u2026")
FAILURE_MESSAGE = _("Failed to submit report")
def __init__(self, review_id, parent_xid, datadir):
BaseApp.__init__(self, datadir, "report_abuse.ui")
# status
self._add_spellcheck_to_textview(self.textview_report)
## make button sensitive when textview has content
self.textview_report.get_buffer().connect(
"changed", self._enable_or_disable_report_button)
# data
self.review_id = review_id
# title
self.submit_window.set_title(_("Flag as Inappropriate"))
# parent xid
#if parent_xid:
# #win = Gtk.gdk.window_foreign_new(int(parent_xid))
# if win:
# self.submit_window.realize()
# self.submit_window.window.set_transient_for(win)
# mousepos
self.submit_window.set_position(Gtk.WindowPosition.MOUSE)
# simple APIs ftw!
self.combobox_report_summary = Gtk.ComboBoxText.new()
self.report_body_vbox.pack_start(self.combobox_report_summary, False,
False, 0)
self.report_body_vbox.reorder_child(self.combobox_report_summary, 2)
self.combobox_report_summary.show()
for term in [_(u"Please make a selection\u2026"),
# TRANSLATORS: The following is one entry in a combobox that is
# located directly beneath a label asking 'Why is this review
# inappropriate?'.
# This text refers to a possible reason for why the corresponding
# review is being flagged as inappropriate.
_("Offensive language"),
# TRANSLATORS: The following is one entry in a combobox that is
# located directly beneath a label asking 'Why is this review
# inappropriate?'.
# This text refers to a possible reason for why the corresponding
# review is being flagged as inappropriate.
_("Infringes copyright"),
# TRANSLATORS: The following is one entry in a combobox that is
# located directly beneath a label asking 'Why is this review
# inappropriate?'.
# This text refers to a possible reason for why the corresponding
# review is being flagged as inappropriate.
_("Contains inaccuracies"),
# TRANSLATORS: The following is one entry in a combobox that is
# located directly beneath a label asking 'Why is this review
# inappropriate?'.
# This text refers to a possible reason for why the corresponding
# review is being flagged as inappropriate.
_("Other")]:
self.combobox_report_summary.append_text(term)
self.combobox_report_summary.set_active(0)
self.combobox_report_summary.connect(
"changed", self._enable_or_disable_report_button)
def _enable_or_disable_report_button(self, widget):
if (self.textview_report.get_buffer().get_char_count() > 0 and
self.combobox_report_summary.get_active() != 0):
self.button_post.set_sensitive(True)
else:
self.button_post.set_sensitive(False)
def _setup_details(self, widget, display_name):
# report label
self.report_label.set_markup(_('Please give details:'))
# review summary label
self.report_summary_label.set_markup(
_('Why is this review inappropriate?'))
#error detail link label
self.label_expander.set_markup('<small><u>%s</u></small>'
% (_('Error Details')))
def on_button_post_clicked(self, button):
logging.debug("report_abuse ok button")
report_summary = self.combobox_report_summary.get_active_text()
text_buffer = self.textview_report.get_buffer()
report_text = text_buffer.get_text(text_buffer.get_start_iter(),
text_buffer.get_end_iter(),
include_hidden_chars=False)
self.api.report_abuse(self.review_id, report_summary, report_text)
def login_successful(self, display_name):
logging.debug("login_successful")
self.main_notebook.set_current_page(1)
#self.label_reporter.set_text(display_name)
self._setup_details(self.submit_window, display_name)
class SubmitUsefulnessApp(BaseApp):
SUBMIT_MESSAGE = _(u"Sending usefulness\u2026")
def __init__(self, review_id, parent_xid, is_useful, datadir):
BaseApp.__init__(self, datadir, "submit_usefulness.ui")
# data
self.review_id = review_id
self.is_useful = bool(is_useful)
# no UI except for error conditions
self.parent_xid = parent_xid
# override behavior of baseapp here as we don't actually
# have a UI by default
def _get_parent_xid_for_login_window(self):
return self.parent_xid
def login_successful(self, display_name):
logging.debug("submit usefulness")
self.main_notebook.set_current_page(1)
self.api.submit_usefulness(self.review_id, self.is_useful)
def on_transmit_failure(self, api, trans, error):
logging.warn("exiting - error: %s" % error)
self.api.shutdown()
self.quit(2)
    # override parent's run to only trigger login (and subsequent
    # events) but no UI; if this is commented out, there is some
    # stub UI that can be useful for testing
def run(self):
self.login()
# override UI update methods from BaseApp to prevent them
# causing errors if called when UI is hidden
def _clear_status_imagery(self):
pass
def _change_status(self, type, message):
pass
class DeleteReviewApp(BaseApp):
SUBMIT_MESSAGE = _(u"Deleting review\u2026")
FAILURE_MESSAGE = _("Failed to delete review")
def __init__(self, review_id, parent_xid, datadir):
# uses same UI as submit usefulness because
# (a) it isn't shown and (b) it's similar in usage
BaseApp.__init__(self, datadir, "submit_usefulness.ui")
# data
self.review_id = review_id
# no UI except for error conditions
self.parent_xid = parent_xid
# override behavior of baseapp here as we don't actually
# have a UI by default
def _get_parent_xid_for_login_window(self):
return self.parent_xid
def login_successful(self, display_name):
logging.debug("delete review")
self.main_notebook.set_current_page(1)
self.api.delete_review(self.review_id)
def on_transmit_failure(self, api, trans, error):
logging.warn("exiting - error: %s" % error)
self.api.shutdown()
self.quit(2)
    # override parent's run to only trigger login (and subsequent
    # events) but no UI; if this is commented out, there is some
    # stub UI that can be useful for testing
def run(self):
self.login()
# override UI update methods from BaseApp to prevent them
# causing errors if called when UI is hidden
def _clear_status_imagery(self):
pass
def _change_status(self, type, message):
pass
| gusDuarte/software-center-5.2 | softwarecenter/ui/gtk3/review_gui_helper.py | Python | lgpl-3.0 | 55,851 | 0.00222 |
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
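# NOTE (assumption based on OSL testsuite conventions): this script is not
# standalone; it is exec'd by the testsuite harness, which pre-defines
# `command` and the `testshade()` helper and afterwards reads `outputs`,
# `failthresh` and `failpercent`. That is why none of those names are
# defined or imported here.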
######################
#Uniform result
##########################
#Uniform subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern -o cout uu_out.tif")
#Uniform subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern -o cout uv_out.tif")
#Varying subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern -o cout vu_out.tif")
#Varying subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern -o cout vv_out.tif")
##################
#Varying result
##################
#Uniform subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_vr -o cout uu_vr_out.tif")
#Uniform subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_vr -o cout uv_vr_out.tif")
#Varying subject, uniform pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_vr -o cout vu_vr_out.tif")
#Varying subject, varying pattern#
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_vr -o cout vv_vr_out.tif")
##########################################
#Uniform result array
##########################################
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_ura -o cout uu_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_ura -o cout uv_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_ura -o cout vu_ura_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_ura -o cout vv_ura_out.tif")
##########################################
#Varying result array
##########################################
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_u_pattern_vra -o cout uu_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 u_subj_v_pattern_vra -o cout uv_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_u_pattern_vra -o cout vu_vra_out.tif")
command += testshade("-t 1 -g 64 64 -od uint8 v_subj_v_pattern_vra -o cout vv_vra_out.tif")
outputs = [
"uu_out.tif",
"uv_out.tif",
"vu_out.tif",
"vv_out.tif",
"uu_vr_out.tif",
"uv_vr_out.tif",
"vu_vr_out.tif",
"vv_vr_out.tif",
"uu_ura_out.tif",
"uv_ura_out.tif",
"vu_ura_out.tif",
"vv_ura_out.tif",
"uu_vra_out.tif",
"uv_vra_out.tif",
"vu_vra_out.tif",
"vv_vra_out.tif",
]
# expect a few LSB failures
failthresh = 0.008
failpercent = 3
| lgritz/OpenShadingLanguage | testsuite/regex-reg/run.py | Python | bsd-3-clause | 2,682 | 0.011186 |
#!/usr/bin/env python
'''
Copyright (C) 2007 John Beard [email protected]
##This extension allows you to draw a Cartesian grid in Inkscape.
##There is a wide range of options including subdivision, subsubdivisions
## and logarithmic scales. Custom line widths are also possible.
##All elements are grouped with similar elements (eg all x-subdivs)
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
'''
import inkex
import simplestyle, sys
from math import *
def draw_SVG_line(x1, y1, x2, y2, width, name, parent):
style = { 'stroke': '#000000', 'stroke-width':str(width), 'fill': 'none' }
line_attribs = {'style':simplestyle.formatStyle(style),
inkex.addNS('label','inkscape'):name,
'd':'M '+str(x1)+','+str(y1)+' L '+str(x2)+','+str(y2)}
inkex.etree.SubElement(parent, inkex.addNS('path','svg'), line_attribs )
def draw_SVG_rect(x,y,w,h, width, fill, name, parent):
style = { 'stroke': '#000000', 'stroke-width':str(width), 'fill':fill}
rect_attribs = {'style':simplestyle.formatStyle(style),
inkex.addNS('label','inkscape'):name,
'x':str(x), 'y':str(y), 'width':str(w), 'height':str(h)}
inkex.etree.SubElement(parent, inkex.addNS('rect','svg'), rect_attribs )
class Grid_Polar(inkex.Effect):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--x_divs",
action="store", type="int",
dest="x_divs", default=5,
help="Major X Divisions")
self.OptionParser.add_option("--dx",
action="store", type="float",
dest="dx", default=100.0,
help="Major X divison Spacing")
self.OptionParser.add_option("--x_subdivs",
action="store", type="int",
dest="x_subdivs", default=2,
help="Subdivisions per Major X division")
self.OptionParser.add_option("--x_log",
action="store", type="inkbool",
dest="x_log", default=False,
help="Logarithmic x subdivisions if true")
self.OptionParser.add_option("--x_subsubdivs",
action="store", type="int",
dest="x_subsubdivs", default=5,
help="Subsubdivisions per Minor X division")
self.OptionParser.add_option("--x_half_freq",
action="store", type="int",
dest="x_half_freq", default=4,
help="Halve Subsubdiv. Frequency after 'n' Subdivs. (log only)")
self.OptionParser.add_option("--x_divs_th",
action="store", type="float",
dest="x_divs_th", default=2,
help="Major X Division Line thickness")
self.OptionParser.add_option("--x_subdivs_th",
action="store", type="float",
dest="x_subdivs_th", default=1,
help="Minor X Division Line thickness")
self.OptionParser.add_option("--x_subsubdivs_th",
action="store", type="float",
dest="x_subsubdivs_th", default=1,
help="Subminor X Division Line thickness")
self.OptionParser.add_option("--y_divs",
action="store", type="int",
dest="y_divs", default=6,
help="Major Y Divisions")
self.OptionParser.add_option("--dy",
action="store", type="float",
dest="dy", default=100.0,
help="Major Gridline Increment")
self.OptionParser.add_option("--y_subdivs",
action="store", type="int",
dest="y_subdivs", default=2,
help="Minor Divisions per Major Y division")
self.OptionParser.add_option("--y_log",
action="store", type="inkbool",
dest="y_log", default=False,
help="Logarithmic y subdivisions if true")
self.OptionParser.add_option("--y_subsubdivs",
action="store", type="int",
dest="y_subsubdivs", default=5,
help="Subsubdivisions per Minor Y division")
self.OptionParser.add_option("--y_half_freq",
action="store", type="int",
dest="y_half_freq", default=4,
help="Halve Y Subsubdiv. Frequency after 'n' Subdivs. (log only)")
self.OptionParser.add_option("--y_divs_th",
action="store", type="float",
dest="y_divs_th", default=2,
help="Major Y Division Line thickness")
self.OptionParser.add_option("--y_subdivs_th",
action="store", type="float",
dest="y_subdivs_th", default=1,
help="Minor Y Division Line thickness")
self.OptionParser.add_option("--y_subsubdivs_th",
action="store", type="float",
dest="y_subsubdivs_th", default=1,
help="Subminor Y Division Line thickness")
self.OptionParser.add_option("--border_th",
action="store", type="float",
dest="border_th", default=3,
help="Border Line thickness")
def effect(self):
#find the pixel dimensions of the overall grid
ymax = self.options.dy * self.options.y_divs
xmax = self.options.dx * self.options.x_divs
# Embed grid in group
#Put in in the centre of the current view
t = 'translate(' + str( self.view_center[0]- xmax/2.0) + ',' + \
str( self.view_center[1]- ymax/2.0) + ')'
        g_attribs = {inkex.addNS('label','inkscape'):'Grid_Cartesian:X' + \
str( self.options.x_divs )+':Y'+str( self.options.y_divs ),
'transform':t }
grid = inkex.etree.SubElement(self.current_layer, 'g', g_attribs)
#Group for major x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MajorXGridlines'}
majglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for major y gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MajorYGridlines'}
majgly = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for minor x gridlines
if self.options.x_subdivs > 1:#if there are any minor x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MinorXGridlines'}
minglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for subminor x gridlines
if self.options.x_subsubdivs > 1:#if there are any minor minor x gridlines
g_attribs = {inkex.addNS('label','inkscape'):'SubMinorXGridlines'}
mminglx = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for minor y gridlines
if self.options.y_subdivs > 1:#if there are any minor y gridlines
g_attribs = {inkex.addNS('label','inkscape'):'MinorYGridlines'}
mingly = inkex.etree.SubElement(grid, 'g', g_attribs)
#Group for subminor y gridlines
        if self.options.y_subsubdivs > 1:#if there are any subminor y gridlines
g_attribs = {inkex.addNS('label','inkscape'):'SubMinorYGridlines'}
mmingly = inkex.etree.SubElement(grid, 'g', g_attribs)
draw_SVG_rect(0, 0, xmax, ymax, self.options.border_th,
'none', 'Border', grid) #border rectangle
        #DO THE X DIVISIONS======================================
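        #For a log scale, subdivision j (1..sd-1) of decade i sits at
        #x = dx*(i + log(j, sd)); e.g. with sd = 10 the '2' line of each
        #decade lands at dx*(i + 0.30103)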
sd = self.options.x_subdivs #sub divs per div
ssd = self.options.x_subsubdivs #subsubdivs per subdiv
for i in range(0, self.options.x_divs): #Major x divisons
            if i>0: #don't draw first line (we made a proper border)
draw_SVG_line(self.options.dx*i, 0,
self.options.dx*i,ymax,
self.options.x_divs_th,
'MajorXDiv'+str(i), majglx)
if self.options.x_log: #log x subdivs
for j in range (1, sd):
if j>1: #the first loop is only for subsubdivs
draw_SVG_line(self.options.dx*(i+log(j, sd)), 0,
self.options.dx*(i+log(j, sd)), ymax,
self.options.x_subdivs_th,
'MinorXDiv'+str(i)+':'+str(j), minglx)
for k in range (1, ssd): #subsub divs
if (j <= self.options.x_half_freq) or (k%2 == 0):#only draw half the subsubdivs past the half-freq point
                            if (ssd%2 > 0) and (j > self.options.x_half_freq): #half frequency won't work with odd numbers of subsubdivs,
ssd2 = ssd+1 #make even
else:
ssd2 = ssd #no change
draw_SVG_line(self.options.dx*(i+log(j+k/float(ssd2),sd )), 0,
self.options.dx*(i+log(j+k/float(ssd2),sd )), ymax,
self.options.x_subsubdivs_th,'SubminorXDiv'+str(i)+':'+str(j)+':'+str(k), mminglx)
else: #linear x subdivs
for j in range (0, sd):
if j>0: #not for the first loop (this loop is for the subsubdivs before the first subdiv)
draw_SVG_line(self.options.dx*(i+j/float(sd)), 0,
self.options.dx*(i+j/float(sd)), ymax,
self.options.x_subdivs_th,
'MinorXDiv'+str(i)+':'+str(j), minglx)
for k in range (1, ssd): #subsub divs
draw_SVG_line(self.options.dx*(i+(j*ssd+k)/((float(sd)*ssd))) , 0,
self.options.dx*(i+(j*ssd+k)/((float(sd)*ssd))) , ymax,
self.options.x_subsubdivs_th,
'SubminorXDiv'+str(i)+':'+str(j)+':'+str(k), mminglx)
        #DO THE Y DIVISIONS========================================
sd = self.options.y_subdivs #sub divs per div
ssd = self.options.y_subsubdivs #subsubdivs per subdiv
for i in range(0, self.options.y_divs): #Major y divisons
            if i>0:#don't draw first line (we made a proper border)
draw_SVG_line(0, self.options.dy*i,
xmax, self.options.dy*i,
self.options.y_divs_th,
'MajorYDiv'+str(i), majgly)
if self.options.y_log: #log y subdivs
for j in range (1, sd):
if j>1: #the first loop is only for subsubdivs
draw_SVG_line(0, self.options.dy*(i+1-log(j,sd)),
xmax, self.options.dy*(i+1-log(j,sd)),
self.options.y_subdivs_th,
                                  'MinorYDiv'+str(i)+':'+str(j), mingly)
for k in range (1, ssd): #subsub divs
if (j <= self.options.y_half_freq) or (k%2 == 0):#only draw half the subsubdivs past the half-freq point
if (ssd%2 > 0) and (j > self.options.y_half_freq): #half frequency won't work with odd numbers of subsubdivs,
ssd2 = ssd+1
else:
ssd2 = ssd #no change
                            draw_SVG_line(0, self.options.dy*(i+1-log(j+k/float(ssd2),sd )),
                                          xmax, self.options.dy*(i+1-log(j+k/float(ssd2),sd )),
                                          self.options.y_subsubdivs_th,
                                          'SubminorYDiv'+str(i)+':'+str(j)+':'+str(k), mmingly)
else: #linear y subdivs
for j in range (0, self.options.y_subdivs):
if j>0:#not for the first loop (this loop is for the subsubdivs before the first subdiv)
draw_SVG_line(0, self.options.dy*(i+j/float(sd)),
xmax, self.options.dy*(i+j/float(sd)),
self.options.y_subdivs_th,
                                  'MinorYDiv'+str(i)+':'+str(j), mingly)
for k in range (1, ssd): #subsub divs
draw_SVG_line(0, self.options.dy*(i+(j*ssd+k)/((float(sd)*ssd))),
xmax, self.options.dy*(i+(j*ssd+k)/((float(sd)*ssd))),
self.options.y_subsubdivs_th,
                                  'SubminorYDiv'+str(i)+':'+str(j)+':'+str(k), mmingly)
if __name__ == '__main__':
    e = Grid_Cartesian()
e.affect()
# vim: expandtab shiftwidth=4 tabstop=8 softtabstop=4 fileencoding=utf-8 textwidth=99
| piksels-and-lines-orchestra/inkscape | share/extensions/grid_cartesian.py | Python | gpl-2.0 | 14,264 | 0.01998 |
# Copyright (C) 2017 Red Hat, Inc. Jake Hunsaker <[email protected]>
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.report.plugins import Plugin, RedHatPlugin
class DockerDistribution(Plugin):
short_desc = 'Docker Distribution'
plugin_name = "docker_distribution"
profiles = ('container',)
def setup(self):
self.add_copy_spec('/etc/docker-distribution/')
self.add_journal('docker-distribution')
conf = self.path_join('/etc/docker-distribution/registry/config.yml')
if self.path_exists(conf):
with open(conf) as f:
for line in f:
if 'rootdirectory' in line:
loc = line.split()[1]
self.add_cmd_output('tree ' + loc)
class RedHatDockerDistribution(DockerDistribution, RedHatPlugin):
packages = ('docker-distribution',)
def setup(self):
self.add_forbidden_path('/etc/docker-distribution/registry/*passwd')
super(RedHatDockerDistribution, self).setup()
| slashdd/sos | sos/report/plugins/docker_distribution.py | Python | gpl-2.0 | 1,334 | 0 |
def binarySearch(someList, target):
    lo = 0
    hi = len(someList)
    while lo+1 < hi:
        test = (lo + hi) // 2  # integer midpoint; plain '/' would yield a float index on Python 3
        if someList[test] > target:
            hi = test
        else:
            lo = test
    if someList and someList[lo] == target:  # guard against indexing an empty list
        return lo
    else:
        return -1
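# binarySearch expects someList to be sorted ascending and returns an index of
# target (the last occurrence, if duplicates exist) or -1 when absent: lo only
# advances while someList[test] <= target, so it converges on the rightmost
# candidate before the final equality check.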
import random
def quickSort(someList):
listSize = len(someList)
if len(someList) == 0:
return []
less = []
greater = []
pivot = someList.pop(random.randint(0, listSize-1))
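    # popping the pivot out of someList keeps each recursive call strictly
    # smaller than its parent, so even duplicate-heavy input terminates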
for element in someList:
if element <= pivot:
less.append(element)
else:
greater.append(element)
retList = quickSort(less) + [pivot] + quickSort(greater)
#print("Return list:");print(retList)
    return retList
| KingSpork/sporklib | algorithms/binarySearch.py | Python | unlicense | 670 | 0.055224 |
from selenium import webdriver
from django.test import LiveServerTestCase, TestCase
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
import datetime
from planner.models import Participation, Event, Occurrence, EventType, Role
from django.contrib.auth.models import User
import pytz
import time
import unittest
tz = pytz.timezone("Europe/Stockholm")
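# Helpers: event() seeds a future Event with two participants so the views have
# data to render; login() drives the login form through Selenium.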
def event(date):
e = Event.objects.create(title="TestEvent", event_type=EventType.objects.get(name="Gudstjänst"))
e.event = Occurrence.objects.create(start_time = tz.localize(date))
Participation.objects.create(user = User.objects.get(pk=2), event = e, attending = "true", role = Role.objects.get(name = "Mötesledare"))
Participation.objects.create(user = User.objects.get(pk=3), event = e, attending = "null", role = Role.objects.get(name = "Textläsare"))
e.save()
def login(browser):
browser.find_element_by_id('id_username').send_keys("admin")
browser.find_element_by_id('id_password').send_keys("1234")
browser.find_element_by_id('id_submit').click()
class BasicTest(StaticLiveServerTestCase):
fixtures = ["fixture1.json"]
def setUp(self):
self.browser = webdriver.Firefox()
self.browser.implicitly_wait(3)
def tearDown(self):
self.browser.quit()
def test_login(self):
self.browser.get(self.live_server_url)
assert "Planering" in self.browser.title
login(self.browser)
menu = self.browser.find_element_by_id('main-menu').text
assert 'Nytt evenemang' in menu
assert 'Tabellvy' in menu
def test_event_is_displayed(self):
event(datetime.datetime.now() + datetime.timedelta(days = 17))
self.browser.get(self.live_server_url)
login(self.browser)
t = self.browser.find_element_by_id("table-scroll").text
time.sleep(10)
print(t)
        assert 'TestEvent' in t
if __name__ == '__main__':
    unittest.main()
| danieka/churchplanner | planner/functional_tests.py | Python | gpl-3.0 | 1,831 | 0.03337 |
import os
import unittest
import random
import xmlrunner
host = os.environ['FALKONRY_HOST_URL'] # host url
token = os.environ['FALKONRY_TOKEN'] # auth token
class TestDatastream(unittest.TestCase):
def setUp(self):
self.created_datastreams = []
self.fclient = FClient(host=host, token=token, options=None)
pass
# Create datastream without any signals
def test_create_standalone_datastream(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a single entity
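    # (narrow/historian style feeds one row per time+signal+value observation,
    #  so signal and value identifiers are declared on the Signal object; wide
    #  style instead declares one Input per signal, as in the tests further below)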
def test_create_datastream_narrow_style_single_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_valueIdentifier("value")
signal.set_signalIdentifier("signal")
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for narrow/historian style data from a multiple entities
def test_create_datastream_narrow_style_multiple_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
signal.set_signalIdentifier("signal") # set signal identifier
signal.set_valueIdentifier("value") # set value identifier
field.set_entityIdentifier("entity") # set entity identifier
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
self.assertEqual(signalResponse.get_signalIdentifier(), signal.get_signalIdentifier(), 'Invalid signal identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for wide style data from a single entity
def test_create_datastream_wide_style_single_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
            inputResp1 = inputs[0]
            inputResp2 = inputs[1]
            inputResp3 = inputs[2]
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for wide style data from a multiple entities
def test_create_datastream_wide_style_multiple_entity(self):
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
field.set_entityIdentifier("entity")
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
            inputResp1 = inputs[0]
            inputResp2 = inputs[1]
            inputResp3 = inputs[2]
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Retrieve Datastreams
def test_get_datastream_list(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# get datastream list
datastreamList = self.fclient.get_datastreams()
self.assertEqual(isinstance(datastreamList, list), True, 'Invalid datastreamlist in response')
self.assertEqual(len(datastreamList) > 0, True, 'No datastreams in get response')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Retrieve Datastream by Id
def test_get_datastream_by_id(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
signal.set_signalIdentifier("signal")
signal.set_valueIdentifier("value")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# get datastream list
datastreamResp = self.fclient.get_datastream(response.get_id())
self.assertEqual(isinstance(datastreamResp,Schemas.Datastream), True, 'Invalid time object after creation')
self.assertEqual(response.get_id(), datastreamResp.get_id(), 'Invalid id of datastream after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Delete Datastream
def test_delete_datastream_by_id(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
response = self.fclient.create_datastream(datastream)
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
# delete datastream
try:
self.fclient.delete_datastream(response.get_id())
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot delete datastream')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
    # Create Datastream with microsecond precision
def test_create_datastream_micro_second_precision(self):
datastream = Schemas.Datastream()
datastream.set_name('Motor Health' + str(random.random()))
datastream.set_time_precision('micro') # set 'micro' for microseconds precision
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
time.set_zone("GMT")
time.set_identifier("time")
time.set_format("iso_8601")
signal.set_signalIdentifier("signal")
signal.set_valueIdentifier("value")
field.set_entityIdentifier("entity")
field.set_signal(signal)
datasource.set_type("STANDALONE")
field.set_time(time)
datastream.set_datasource(datasource)
datastream.set_field(field)
try:
# create Datastream
response = self.fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),None,'Invalid entity name object after creation')
signalResponse = fieldResponse.get_signal()
self.assertEqual(signalResponse.get_signalIdentifier(), "signal", 'Invalid signal identifier object after creation')
self.assertEqual(signalResponse.get_valueIdentifier(),signal.get_valueIdentifier(), 'Invalid value identifier after object creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
self.assertEqual(response.get_time_precision(), datastream.get_time_precision(), 'Invalid time precision after creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
# Create Datastream for batch identifier
def test_create_datastream_with_batch_identifier(self):
        fclient = FClient(host=host, token=token, options=None)
datastream = Schemas.Datastream()
datasource = Schemas.Datasource()
field = Schemas.Field()
time = Schemas.Time()
signal = Schemas.Signal()
input1 = Schemas.Input()
input2 = Schemas.Input()
input3 = Schemas.Input()
datastream.set_name('Motor Health' + str(random.random())) # set name of the Datastream
input1.set_name("Signal1") # set name of input signal
input1.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input1.set_event_type("Samples") # set event type of input signal
input2.set_name("Signal2") # set name of input signal
input2.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input2.set_event_type("Samples") # set event type of input signal
input3.set_name("Signal3") # set name of input signal
input3.set_value_type("Numeric") # set value type of input signal (Numeric for number, Categorical for string type)
input3.set_event_type("Samples") # set event type of input signal
inputs = []
inputs.append(input1)
inputs.append(input2)
inputs.append(input3)
time.set_zone("GMT") # set timezone of the datastream
time.set_identifier("time") # set time identifier of the datastream
time.set_format("iso_8601") # set time format of the datastream
field.set_time(time)
field.set_signal(signal) # set signal in field
field.set_batchIdentifier("batch") # set batchIdentifier in field
datasource.set_type("STANDALONE") # set datastource type in datastream
datastream.set_datasource(datasource)
datastream.set_field(field)
datastream.set_inputs(inputs)
try:
# create Datastream
response = fclient.create_datastream(datastream)
self.created_datastreams.append(response.get_id())
self.assertEqual(isinstance(response, Schemas.Datastream), True, 'Invalid Datastream object after creation')
self.assertEqual(isinstance(response.get_id(), str), True, 'Invalid id of datastream after creation')
self.assertEqual(response.get_name(), datastream.get_name(), 'Invalid name of Datastream after creation')
fieldResponse = response.get_field()
self.assertEqual(isinstance(fieldResponse, Schemas.Field), True, 'Invalid field in Datastream object after creation')
self.assertEqual(fieldResponse.get_entityIdentifier(),"entity",'Invalid entity identifier object after creation')
self.assertEqual(fieldResponse.get_entityName(),response.get_name(),'Invalid entity name object after creation')
self.assertEqual(fieldResponse.get_batchIdentifier(),"batch",'Invalid batchIdentifier after creation')
timeResponse = fieldResponse.get_time()
self.assertEqual(isinstance(timeResponse, Schemas.Time), True, 'Invalid time object after creation')
self.assertEqual(timeResponse.get_zone(), time.get_zone(), 'Invalid zone object after creation')
self.assertEqual(timeResponse.get_identifier(), time.get_identifier(), 'Invalid time identifier object after creation')
self.assertEqual(timeResponse.get_format(), time.get_format(), 'Invalid time format object after creation')
inputs = response.get_inputs()
self.assertEqual(isinstance(inputs, list), True, 'Invalid inputs object after creation')
self.assertEqual(len(inputs), 3, 'Invalid inputs object after creation')
            inputResp1 = inputs[0]
            inputResp2 = inputs[1]
            inputResp3 = inputs[2]
self.assertEqual(inputResp1.get_name(), input1.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp1.get_value_type(), input1.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp2.get_name(), input2.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp2.get_value_type(), input2.get_value_type(),'Invalid input value type after object creation')
self.assertEqual(inputResp3.get_name(), input3.get_name(),'Invalid input after object creation')
self.assertEqual(inputResp3.get_value_type(), input3.get_value_type(),'Invalid input value type after object creation')
except Exception as e:
print(exception_handler(e))
self.assertEqual(0, 1, 'Cannot create datastream')
def tearDown(self): # teardown
for ds in self.created_datastreams:
try:
self.fclient.delete_datastream(ds)
except Exception as e:
print(exception_handler(e))
pass
if __name__ == '__main__':
if __package__ is None:
import sys
from os import path
sys.path.append(
path.dirname(
path.dirname(
path.abspath(__file__)
)
)
)
from falkonryclient import schemas as Schemas
from falkonryclient import client as FClient
from falkonryclient.helper.utils import exception_handler
else:
from ..falkonryclient import schemas as Schemas
from ..falkonryclient import client as FClient
from ..falkonryclient.helper.utils import exception_handler
unittest.main(
testRunner=xmlrunner.XMLTestRunner(output='out'),
failfast=False, buffer=False, catchbreak=False)
else:
from falkonryclient import schemas as Schemas
from falkonryclient import client as FClient
from falkonryclient.helper.utils import exception_handler
| Falkonry/falkonry-python-client | test/TestDatastream.py | Python | mit | 35,686 | 0.00737 |
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import User
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.views.generic import View
from django.views.generic.base import TemplateView
from django.utils.translation import ugettext_lazy as _
from mailing.models import Mail
from .forms import *
LENGTHPASSWORD = 8
REDIRECTADMIN = "/admin"
SUBJECTMAIL = _("New SignUp at Kwyjibo")
BODYMAIL = _("Welcome to Kwyjibo! You have signed up successfully. You can now acces to the platform with the user: {username}.")
SUBJECTMAILRECOVERY = _("Recupero de password en Jarvis")
BODYMAILRECOVERY = _("Has pedido un recupero de password en Jarvis. Aquí están tus nuevos datos de acceso: '%s'/'%s'.")
SUBJECTMAILCHANGE = _("Cambio de password en Jarvis")
BODYMAILCHANGE = _("Has cambiado el password en Jarvis. Ya puedes acceder con el usuario '%s'. El password ingresado no se envia por cuestiones de seguridad.")
class IndexView(LoginRequiredMixin, View):
def get(self, request, course_id = None):
user = request.user
course = None
if(Teacher.objects.filter(user_id=user.id)):
if course_id:
course = Course.objects.get(pk = course_id)
request.session["current_course_id"] = course.pk
request.session["current_course_name"] = course.name
return redirect('teachers:dashboard', course_id = course_id)
else:
if Course.objects.all().exists():
course = Course.objects.all().order_by('-name')[:1][0]
if course:
request.session["current_course_id"] = course.pk
request.session["current_course_name"] = course.name
return redirect('teachers:dashboard', course_id = course.pk)
return redirect('teachers:index')
elif(user.is_superuser):
return HttpResponseRedirect(REDIRECTADMIN)
elif(Student.objects.filter(user_id=user.id).exists()):
student = Student.objects.get(user_id=user.id)
if (student.shifts.all().count() == 1):
return redirect('students:assignments', course_id = student.shifts.all()[0].course.pk)
else:
return redirect('students:index')
else:
return render(request, 'index.html')
class SignUpView(View):
def get(self, request):
form = RegistrationForm()
return render(request, 'registration/register.html', {'form': form, })
def post(self, request):
form = RegistrationForm(request.POST)
if (form.is_valid()):
user = User()
user.username = form.data['username']
user.first_name = form.data['first_name']
user.last_name = form.data['last_name']
user.set_password(form.data['password'])
user.email = form.data['email']
user.save()
student = Student()
student.user = user
student.uid = form.data['username']
student.save()
if (Shift.objects.all().count() > 0):
shift = Shift.objects.get(pk=form.data['shifts']);
student.shifts.add(shift)
student.save()
mail = Mail()
mail.save_mail(SUBJECTMAIL, BODYMAIL.format(username = user.username), user.email)
return render(request, 'registration/registration-success.html')
return render(request, 'registration/register.html', {'form': form,})
class ChangePasswordView(View):
def get(self, request):
if not request.user.is_authenticated():
            return redirect('index')
form = ChangePasswordForm()
        return render(request, 'registration/change_password.html', {'form': form, })
def post(self, request):
if not request.user.is_authenticated():
            return redirect('index')
form = ChangePasswordForm(request.POST)
if form.is_valid():
data = form.cleaned_data
user = User.objects.get(pk = request.user.pk)
if user.check_password(data['current_password']):
user.set_password(data['password'])
user.save()
else:
bad_password = True
return render(request, 'registration/change_password.html', {
'form': form,
'bad_password': bad_password
})
login(request, user)
return redirect('index')
return render(request, 'registration/change_password.html', {'form': form, })
def logout_page(request):
"""
Log users out and re-direct them to the main page.
"""
logout(request)
return redirect('index')
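# A minimal sketch of the URL wiring these views rely on (names taken from the
# redirect() calls above; the route paths here are illustrative, and the actual
# patterns live in the project's urls.py):
#
#   urlpatterns = [
#       url(r'^$', IndexView.as_view(), name='index'),
#       url(r'^accounts/register/$', SignUpView.as_view(), name='signup'),
#       url(r'^accounts/password/$', ChangePasswordView.as_view(), name='change_password'),
#       url(r'^accounts/logout/$', logout_page, name='logout'),
#   ]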
| pelgoros/kwyjibo | kwyjibo/views.py | Python | gpl-3.0 | 4,918 | 0.005696 |
'''
Created on June 6, 2018
Filer Guidelines: esma32-60-254_esef_reporting_manual.pdf
@author: Workiva
(c) Copyright 2022 Workiva, All rights reserved.
'''
try:
import regex as re
except ImportError:
import re
from arelle.ModelValue import qname
from arelle.XbrlConst import all, notAll, hypercubeDimension, dimensionDomain, domainMember, dimensionDefault, widerNarrower
browserMaxBase64ImageLength = 5242880 # 5MB
esefTaxonomyNamespaceURIs = {
"http://xbrl.ifrs.org/taxonomy/20",
"http://xbrl.ifrs.org/taxonomy/20",
}
disallowedURIsPattern = re.compile(
"http://xbrl.ifrs.org/taxonomy/[0-9-]{10}/full_ifrs/full_ifrs-cor_[0-9-]{10}[.]xsd|"
"http://www.esma.europa.eu/taxonomy/[0-9-]{10}/esef_all.xsd"
)
DefaultDimensionLinkroles = ("http://www.esma.europa.eu/xbrl/role/cor/ifrs-dim_role-990000",)
LineItemsNotQualifiedLinkrole = "http://www.esma.europa.eu/xbrl/role/cor/esef_role-999999"
qnDomainItemTypes = {qname("{http://www.xbrl.org/dtr/type/non-numeric}nonnum:domainItemType"),
qname("{http://www.xbrl.org/dtr/type/2020-01-21}nonnum:domainItemType")}
linkbaseRefTypes = {
"http://www.xbrl.org/2003/role/calculationLinkbaseRef": "cal",
"http://www.xbrl.org/2003/role/definitionLinkbaseRef": "def",
"http://www.xbrl.org/2003/role/labelLinkbaseRef": "lab",
"http://www.xbrl.org/2003/role/presentationLinkbaseRef": "pre",
"http://www.xbrl.org/2003/role/referenceLinkbaseRef": "ref"
}
filenamePatterns = {
"cal": "{base}-{date}_cal.xml",
"def": "{base}-{date}_def.xml",
"lab": "{base}-{date}_lab-{lang}.xml",
"pre": "{base}-{date}_pre.xml",
"ref": "{base}-{date}_ref.xml"
}
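# e.g. filenamePatterns["lab"].format(base="ifrs", date="2021-12-31", lang="en")
# yields "ifrs-2021-12-31_lab-en.xml", which filenameRegexes["lab"] below accepts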
filenameRegexes = {
"cal": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_cal[.]xml$",
"def": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_def[.]xml$",
"lab": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_lab-[a-zA-Z]{1,8}(-[a-zA-Z0-9]{1,8})*[.]xml$",
"pre": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_pre[.]xml$",
"ref": r"(.{1,})-[0-9]{4}-[0-9]{2}-[0-9]{2}_ref[.]xml$"
}
mandatory = set() # mandatory element qnames
# hidden references
untransformableTypes = {"anyURI", "base64Binary", "hexBinary", "NOTATION", "QName", "time",
"token", "language"}
esefDefinitionArcroles = {
all, notAll, hypercubeDimension, dimensionDomain, domainMember, dimensionDefault,
widerNarrower
}
esefPrimaryStatementPlaceholderNames = (
# to be augmented with future IFRS releases as they come known, as well as further PFS placeholders
"StatementOfFinancialPositionAbstract",
"IncomeStatementAbstract",
"StatementOfComprehensiveIncomeAbstract",
"StatementOfCashFlowsAbstract",
"StatementOfChangesInEquityAbstract",
"StatementOfChangesInNetAssetsAvailableForBenefitsAbstract",
"StatementOfProfitOrLossAndOtherComprehensiveIncomeAbstract"
)
esefStatementsOfMonetaryDeclarationNames = {
# from Annex II para 1
"StatementOfFinancialPositionAbstract",
"StatementOfProfitOrLossAndOtherComprehensiveIncomeAbstract"
"StatementOfChangesInEquityAbstract",
"StatementOfCashFlowsAbstract",
}
esefMandatoryElementNames2020 = (
"NameOfReportingEntityOrOtherMeansOfIdentification",
"ExplanationOfChangeInNameOfReportingEntityOrOtherMeansOfIdentificationFromEndOfPrecedingReportingPeriod",
"DomicileOfEntity",
"LegalFormOfEntity",
"CountryOfIncorporation",
"AddressOfRegisteredOfficeOfEntity",
"PrincipalPlaceOfBusiness",
"DescriptionOfNatureOfEntitysOperationsAndPrincipalActivities",
"NameOfParentEntity",
"NameOfUltimateParentOfGroup"
)
esefMandatoryElementNames2022 = (
"AddressOfRegisteredOfficeOfEntity",
"CountryOfIncorporation",
"DescriptionOfAccountingPolicyForAvailableforsaleFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForBiologicalAssetsExplanatory",
"DescriptionOfAccountingPolicyForBorrowingCostsExplanatory",
"DescriptionOfAccountingPolicyForBorrowingsExplanatory",
"DescriptionOfAccountingPolicyForBusinessCombinationsExplanatory",
"DescriptionOfAccountingPolicyForBusinessCombinationsAndGoodwillExplanatory",
"DescriptionOfAccountingPolicyForCashFlowsExplanatory",
"DescriptionOfAccountingPolicyForCollateralExplanatory",
"DescriptionOfAccountingPolicyForConstructionInProgressExplanatory",
"DescriptionOfAccountingPolicyForContingentLiabilitiesAndContingentAssetsExplanatory",
"DescriptionOfAccountingPolicyForCustomerAcquisitionCostsExplanatory",
"DescriptionOfAccountingPolicyForCustomerLoyaltyProgrammesExplanatory",
"DescriptionOfAccountingPolicyForDecommissioningRestorationAndRehabilitationProvisionsExplanatory",
"DescriptionOfAccountingPolicyForDeferredAcquisitionCostsArisingFromInsuranceContractsExplanatory",
"DescriptionOfAccountingPolicyForDeferredIncomeTaxExplanatory",
"DescriptionOfAccountingPolicyForDepreciationExpenseExplanatory",
"DescriptionOfAccountingPolicyForDerecognitionOfFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForDerivativeFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForDerivativeFinancialInstrumentsAndHedgingExplanatory",
"DescriptionOfAccountingPolicyToDetermineComponentsOfCashAndCashEquivalents",
"DescriptionOfAccountingPolicyForDiscontinuedOperationsExplanatory",
"DescriptionOfAccountingPolicyForDiscountsAndRebatesExplanatory",
"DescriptionOfAccountingPolicyForDividendsExplanatory",
"DescriptionOfAccountingPolicyForEarningsPerShareExplanatory",
"DescriptionOfAccountingPolicyForEmissionRightsExplanatory",
"DescriptionOfAccountingPolicyForEmployeeBenefitsExplanatory",
"DescriptionOfAccountingPolicyForEnvironmentRelatedExpenseExplanatory",
"DescriptionOfAccountingPolicyForExceptionalItemsExplanatory",
"DescriptionOfAccountingPolicyForExpensesExplanatory",
"DescriptionOfAccountingPolicyForExplorationAndEvaluationExpenditures",
"DescriptionOfAccountingPolicyForFairValueMeasurementExplanatory",
"DescriptionOfAccountingPolicyForFeeAndCommissionIncomeAndExpenseExplanatory",
"DescriptionOfAccountingPolicyForFinanceCostsExplanatory",
"DescriptionOfAccountingPolicyForFinanceIncomeAndCostsExplanatory",
"DescriptionOfAccountingPolicyForFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForFinancialGuaranteesExplanatory",
"DescriptionOfAccountingPolicyForFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForFinancialInstrumentsAtFairValueThroughProfitOrLossExplanatory",
"DescriptionOfAccountingPolicyForFinancialLiabilitiesExplanatory",
"DescriptionOfAccountingPolicyForForeignCurrencyTranslationExplanatory",
"DescriptionOfAccountingPolicyForFranchiseFeesExplanatory",
"DescriptionOfAccountingPolicyForFunctionalCurrencyExplanatory",
"DescriptionOfAccountingPolicyForGoodwillExplanatory",
"DescriptionOfAccountingPolicyForGovernmentGrants",
"DescriptionOfAccountingPolicyForHedgingExplanatory",
"DescriptionOfAccountingPolicyForHeldtomaturityInvestmentsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfAssetsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfFinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForImpairmentOfNonfinancialAssetsExplanatory",
"DescriptionOfAccountingPolicyForIncomeTaxExplanatory",
"DescriptionOfAccountingPolicyForInsuranceContracts",
"DescriptionOfAccountingPolicyForIntangibleAssetsAndGoodwillExplanatory",
"DescriptionOfAccountingPolicyForIntangibleAssetsOtherThanGoodwillExplanatory",
"DescriptionOfAccountingPolicyForInterestIncomeAndExpenseExplanatory",
"DescriptionOfAccountingPolicyForInvestmentInAssociates",
"DescriptionOfAccountingPolicyForInvestmentInAssociatesAndJointVenturesExplanatory",
"DescriptionOfAccountingPolicyForInvestmentPropertyExplanatory",
"DescriptionOfAccountingPolicyForInvestmentsInJointVentures",
"DescriptionOfAccountingPolicyForInvestmentsOtherThanInvestmentsAccountedForUsingEquityMethodExplanatory",
"DescriptionOfAccountingPolicyForIssuedCapitalExplanatory",
"DescriptionOfAccountingPolicyForLeasesExplanatory",
"DescriptionOfAccountingPolicyForLoansAndReceivablesExplanatory",
"DescriptionOfAccountingPolicyForMeasuringInventories",
"DescriptionOfAccountingPolicyForMiningAssetsExplanatory",
"DescriptionOfAccountingPolicyForMiningRightsExplanatory",
"DescriptionOfAccountingPolicyForNoncurrentAssetsOrDisposalGroupsClassifiedAsHeldForSaleExplanatory",
"DescriptionOfAccountingPolicyForNoncurrentAssetsOrDisposalGroupsClassifiedAsHeldForSaleAndDiscontinuedOperationsExplanatory",
"DescriptionOfAccountingPolicyForOffsettingOfFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForOilAndGasAssetsExplanatory",
"DescriptionOfAccountingPolicyForProgrammingAssetsExplanatory",
"DescriptionOfAccountingPolicyForPropertyPlantAndEquipmentExplanatory",
"DescriptionOfAccountingPolicyForProvisionsExplanatory",
"DescriptionOfAccountingPolicyForReclassificationOfFinancialInstrumentsExplanatory",
"DescriptionOfAccountingPolicyForRecognisingDifferenceBetweenFairValueAtInitialRecognitionAndAmountDeterminedUsingValuationTechniqueExplanatory",
"DescriptionOfAccountingPolicyForRecognitionOfRevenue",
"DescriptionOfAccountingPolicyForRegulatoryDeferralAccountsExplanatory",
"DescriptionOfAccountingPolicyForReinsuranceExplanatory",
"DescriptionOfAccountingPolicyForRepairsAndMaintenanceExplanatory",
"DescriptionOfAccountingPolicyForRepurchaseAndReverseRepurchaseAgreementsExplanatory",
"DescriptionOfAccountingPolicyForResearchAndDevelopmentExpenseExplanatory",
"DescriptionOfAccountingPolicyForRestrictedCashAndCashEquivalentsExplanatory",
"DescriptionOfAccountingPolicyForSegmentReportingExplanatory",
"DescriptionOfAccountingPolicyForServiceConcessionArrangementsExplanatory",
"DescriptionOfAccountingPolicyForSharebasedPaymentTransactionsExplanatory",
"DescriptionOfAccountingPolicyForStrippingCostsExplanatory",
"DescriptionOfAccountingPolicyForSubsidiariesExplanatory",
"DescriptionOfAccountingPolicyForTaxesOtherThanIncomeTaxExplanatory",
"DescriptionOfAccountingPolicyForTerminationBenefits",
"DescriptionOfAccountingPolicyForTradeAndOtherPayablesExplanatory",
"DescriptionOfAccountingPolicyForTradeAndOtherReceivablesExplanatory",
"DescriptionOfAccountingPolicyForTradingIncomeAndExpenseExplanatory",
"DescriptionOfAccountingPolicyForTransactionsWithNoncontrollingInterestsExplanatory",
"DescriptionOfAccountingPolicyForTransactionsWithRelatedPartiesExplanatory",
"DescriptionOfAccountingPolicyForTreasurySharesExplanatory",
"DescriptionOfAccountingPolicyForWarrantsExplanatory",
"DescriptionOfReasonWhyFinancialStatementsAreNotEntirelyComparable",
"DescriptionOfNatureOfEntitysOperationsAndPrincipalActivities",
"DescriptionOfOtherAccountingPoliciesRelevantToUnderstandingOfFinancialStatements",
"DescriptionOfReasonForUsingLongerOrShorterReportingPeriod",
"DisclosureOfAccountingJudgementsAndEstimatesExplanatory",
"DisclosureOfAccruedExpensesAndOtherLiabilitiesExplanatory",
"DisclosureOfAllowanceForCreditLossesExplanatory",
"DisclosureOfAssetsAndLiabilitiesWithSignificantRiskOfMaterialAdjustmentExplanatory",
"DisclosureOfSignificantInvestmentsInAssociatesExplanatory",
"DisclosureOfAuditorsRemunerationExplanatory",
"DisclosureOfAuthorisationOfFinancialStatementsExplanatory",
"DisclosureOfAvailableforsaleAssetsExplanatory",
"DisclosureOfBasisOfConsolidationExplanatory",
"DisclosureOfBasisOfPreparationOfFinancialStatementsExplanatory",
"DisclosureOfBiologicalAssetsAndGovernmentGrantsForAgriculturalActivityExplanatory",
"DisclosureOfBorrowingsExplanatory",
"DisclosureOfBusinessCombinationsExplanatory",
"DisclosureOfCashAndBankBalancesAtCentralBanksExplanatory",
"DisclosureOfCashAndCashEquivalentsExplanatory",
"DisclosureOfCashFlowStatementExplanatory",
"DisclosureOfChangesInAccountingPoliciesExplanatory",
"DisclosureOfChangesInAccountingPoliciesAccountingEstimatesAndErrorsExplanatory",
"DisclosureOfClaimsAndBenefitsPaidExplanatory",
"DisclosureOfCollateralExplanatory",
"DisclosureOfCommitmentsExplanatory",
"DisclosureOfCommitmentsAndContingentLiabilitiesExplanatory",
"DisclosureOfContingentLiabilitiesExplanatory",
"DisclosureOfCostOfSalesExplanatory",
"DisclosureOfCreditRiskExplanatory",
"DisclosureOfDebtSecuritiesExplanatory",
"DisclosureOfDeferredAcquisitionCostsArisingFromInsuranceContractsExplanatory",
"DisclosureOfDeferredIncomeExplanatory",
"DisclosureOfDeferredTaxesExplanatory",
"DisclosureOfDepositsFromBanksExplanatory",
"DisclosureOfDepositsFromCustomersExplanatory",
"DisclosureOfDepreciationAndAmortisationExpenseExplanatory",
"DisclosureOfDerivativeFinancialInstrumentsExplanatory",
"DisclosureOfDiscontinuedOperationsExplanatory",
"DisclosureOfDividendsExplanatory",
"DisclosureOfEarningsPerShareExplanatory",
"DisclosureOfEffectOfChangesInForeignExchangeRatesExplanatory",
"DisclosureOfEmployeeBenefitsExplanatory",
"DisclosureOfEntitysReportableSegmentsExplanatory",
"DisclosureOfEventsAfterReportingPeriodExplanatory",
"DisclosureOfExpensesExplanatory",
"DisclosureOfExpensesByNatureExplanatory",
"DisclosureOfExplorationAndEvaluationAssetsExplanatory",
"DisclosureOfFairValueMeasurementExplanatory",
"DisclosureOfFairValueOfFinancialInstrumentsExplanatory",
"DisclosureOfFeeAndCommissionIncomeExpenseExplanatory",
"DisclosureOfFinanceCostExplanatory",
"DisclosureOfFinanceIncomeExpenseExplanatory",
"DisclosureOfFinanceIncomeExplanatory",
"DisclosureOfFinancialAssetsHeldForTradingExplanatory",
"DisclosureOfFinancialInstrumentsExplanatory",
"DisclosureOfFinancialInstrumentsAtFairValueThroughProfitOrLossExplanatory",
"DisclosureOfFinancialInstrumentsDesignatedAtFairValueThroughProfitOrLossExplanatory",
"DisclosureOfFinancialInstrumentsHeldForTradingExplanatory",
"DisclosureOfFinancialLiabilitiesHeldForTradingExplanatory",
"DisclosureOfFinancialRiskManagementExplanatory",
"DisclosureOfFirstTimeAdoptionExplanatory",
"DisclosureOfGeneralAndAdministrativeExpenseExplanatory",
"DisclosureOfGeneralInformationAboutFinancialStatementsExplanatory",
"DisclosureOfGoingConcernExplanatory",
"DisclosureOfGoodwillExplanatory",
"DisclosureOfGovernmentGrantsExplanatory",
"DisclosureOfImpairmentOfAssetsExplanatory",
"DisclosureOfIncomeTaxExplanatory",
"DisclosureOfInformationAboutEmployeesExplanatory",
"DisclosureOfInformationAboutKeyManagementPersonnelExplanatory",
"DisclosureOfInsuranceContractsExplanatory",
"DisclosureOfInsurancePremiumRevenueExplanatory",
"DisclosureOfIntangibleAssetsExplanatory",
"DisclosureOfIntangibleAssetsAndGoodwillExplanatory",
"DisclosureOfInterestExpenseExplanatory",
"DisclosureOfInterestIncomeExpenseExplanatory",
"DisclosureOfInterestIncomeExplanatory",
"DisclosureOfInventoriesExplanatory",
"DisclosureOfInvestmentContractsLiabilitiesExplanatory",
"DisclosureOfInvestmentPropertyExplanatory",
"DisclosureOfInvestmentsAccountedForUsingEquityMethodExplanatory",
"DisclosureOfInvestmentsOtherThanInvestmentsAccountedForUsingEquityMethodExplanatory",
"DisclosureOfIssuedCapitalExplanatory",
"DisclosureOfJointVenturesExplanatory",
"DisclosureOfLeasePrepaymentsExplanatory",
"DisclosureOfLeasesExplanatory",
"DisclosureOfLiquidityRiskExplanatory",
"DisclosureOfLoansAndAdvancesToBanksExplanatory",
"DisclosureOfLoansAndAdvancesToCustomersExplanatory",
"DisclosureOfMarketRiskExplanatory",
"DisclosureOfNetAssetValueAttributableToUnitholdersExplanatory",
"DisclosureOfNoncontrollingInterestsExplanatory",
"DisclosureOfNoncurrentAssetsHeldForSaleAndDiscontinuedOperationsExplanatory",
"DisclosureOfNoncurrentAssetsOrDisposalGroupsClassifiedAsHeldForSaleExplanatory",
"DisclosureOfObjectivesPoliciesAndProcessesForManagingCapitalExplanatory",
"DisclosureOfOtherAssetsExplanatory",
"DisclosureOfOtherCurrentAssetsExplanatory",
"DisclosureOfOtherCurrentLiabilitiesExplanatory",
"DisclosureOfOtherLiabilitiesExplanatory",
"DisclosureOfOtherNoncurrentAssetsExplanatory",
"DisclosureOfOtherNoncurrentLiabilitiesExplanatory",
"DisclosureOfOtherOperatingExpenseExplanatory",
"DisclosureOfOtherOperatingIncomeExpenseExplanatory",
"DisclosureOfOtherOperatingIncomeExplanatory",
"DisclosureOfPrepaymentsAndOtherAssetsExplanatory",
"DisclosureOfProfitLossFromOperatingActivitiesExplanatory",
"DisclosureOfPropertyPlantAndEquipmentExplanatory",
"DisclosureOfOtherProvisionsExplanatory",
"DisclosureOfReclassificationOfFinancialInstrumentsExplanatory",
"DisclosureOfReclassificationsOrChangesInPresentationExplanatory",
"DisclosureOfRecognisedRevenueFromConstructionContractsExplanatory"
"DisclosureOfReinsuranceExplanatory",
"DisclosureOfRelatedPartyExplanatory",
"DisclosureOfRepurchaseAndReverseRepurchaseAgreementsExplanatory",
"DisclosureOfResearchAndDevelopmentExpenseExplanatory",
"DisclosureOfReservesAndOtherEquityInterestExplanatory",
"DisclosureOfRestrictedCashAndCashEquivalentsExplanatory",
"DisclosureOfRevenueExplanatory",
"DisclosureOfServiceConcessionArrangementsExplanatory",
"DisclosureOfShareCapitalReservesAndOtherEquityInterestExplanatory",
"DisclosureOfSharebasedPaymentArrangementsExplanatory",
"DisclosureOfSummaryOfSignificantAccountingPoliciesExplanatory",
"DisclosureOfSubordinatedLiabilitiesExplanatory",
"DisclosureOfSignificantInvestmentsInSubsidiariesExplanatory",
"DisclosureOfTaxReceivablesAndPayablesExplanatory",
"DisclosureOfTradeAndOtherPayablesExplanatory",
"DisclosureOfTradeAndOtherReceivablesExplanatory",
"DisclosureOfTradingIncomeExpenseExplanatory",
"DisclosureOfTreasurySharesExplanatory",
"DescriptionOfUncertaintiesOfEntitysAbilityToContinueAsGoingConcern",
"DividendsProposedOrDeclaredBeforeFinancialStatementsAuthorisedForIssueButNotRecognisedAsDistributionToOwners",
"DividendsProposedOrDeclaredBeforeFinancialStatementsAuthorisedForIssueButNotRecognisedAsDistributionToOwnersPerShare",
"DividendsRecognisedAsDistributionsToOwnersPerShare",
"DomicileOfEntity",
"ExplanationOfDepartureFromIFRS",
"ExplanationOfFactAndBasisForPreparationOfFinancialStatementsWhenNotGoingConcernBasis",
"ExplanationOfFinancialEffectOfDepartureFromIFRS",
"ExplanationOfAssumptionAboutFutureWithSignificantRiskOfResultingInMaterialAdjustments",
"ExplanationWhyFinancialStatementsNotPreparedOnGoingConcernBasis",
"LegalFormOfEntity",
"LengthOfLifeOfLimitedLifeEntity",
"NameOfParentEntity",
"NameOfReportingEntityOrOtherMeansOfIdentification",
"NameOfUltimateParentOfGroup",
"PrincipalPlaceOfBusiness",
"StatementOfIFRSCompliance",
) | acsone/Arelle | arelle/plugin/validate/ESEF/Const.py | Python | apache-2.0 | 19,152 | 0.00282 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import print_function
# python standard library
from socket import socket
import sys, os, re, stat, math, time, datetime
import importlib
# third party modules
try: # unicode monkeypatch for windoze
import win_unicode_console
win_unicode_console.enable()
except:
msg = "Please install the 'win_unicode_console' module."
if os.name == 'nt': print(msg)
try: # os independent color support
from colorama import init, Fore, Back, Style
init() # required to get colors on windoze
except ImportError:
msg = "Please install the 'colorama' module for color support."
# poor man's colored output (ANSI)
class Back():
BLUE = '\x1b[44m' if os.name == 'posix' else ''
CYAN = '\x1b[46m' if os.name == 'posix' else ''
GREEN = '\x1b[42m' if os.name == 'posix' else ''
MAGENTA = '\x1b[45m' if os.name == 'posix' else ''
RED = '\x1b[41m' if os.name == 'posix' else ''
class Fore():
BLUE = '\x1b[34m' if os.name == 'posix' else ''
CYAN = '\x1b[36m' if os.name == 'posix' else ''
MAGENTA = '\x1b[35m' if os.name == 'posix' else ''
YELLOW = '\x1b[33m' if os.name == 'posix' else ''
class Style():
DIM = '\x1b[2m' if os.name == 'posix' else ''
BRIGHT = '\x1b[1m' if os.name == 'posix' else ''
RESET_ALL = '\x1b[0m' if os.name == 'posix' else ''
NORMAL = '\x1b[22m' if os.name == 'posix' else ''
print(Back.RED + msg + Style.RESET_ALL)
# ----------------------------------------------------------------------
# return first item of list or alternative
def item(mylist, alternative=""):
return next(iter(mylist), alternative)
# split list into chunks of equal size
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
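# a minimal usage sketch of the two helpers above (values are illustrative):
#   item([], 'n/a')            -> 'n/a'
#   item(['a', 'b'])           -> 'a'
#   list(chunks('abcdef', 2))  -> ['ab', 'cd', 'ef']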
# ----------------------------------------------------------------------
class log():
# open logfile
def open(self, filename):
try:
return open(filename, mode='wb')
except IOError as e:
output().errmsg("Cannot open logfile", e)
return None
# write raw data to logfile
def write(self, logfile, data):
# logfile open and data non-empty
if logfile and data:
try:
logfile.write(data)
except IOError as e:
output().errmsg("Cannot log", e)
# write comment to logfile
def comment(self, logfile, line):
comment = "%" + ("[ " + line + " ]").center(72, '-')
self.write(logfile, os.linesep + comment + os.linesep)
# close logfile
def close(self, logfile):
try:
logfile.close()
except IOError as e:
output().errmsg("Cannot close logfile", e)
# ----------------------------------------------------------------------
class output():
# show send commands (debug mode)
def send(self, str, mode):
if str: print(Back.CYAN + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.CYAN + conv().hex(str, ':') + Style.RESET_ALL)
# show recv commands (debug mode)
def recv(self, str, mode):
if str: print(Back.MAGENTA + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.MAGENTA + conv().hex(str, ':') + Style.RESET_ALL)
# show information
def info(self, msg, eol=None):
if msg: print(Back.BLUE + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show raw data
def raw(self, msg, eol=None):
if msg: print(Fore.YELLOW + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show chit-chat
def chitchat(self, msg, eol=None):
if msg: print(Style.DIM + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show warning message
def warning(self, msg):
if msg: print(Back.RED + msg + Style.RESET_ALL)
# show green message
def green(self, msg):
if msg: print(Back.GREEN + msg + Style.RESET_ALL)
# show error message
def errmsg(self, msg, info=""):
info = str(info).strip()
if info: # monkeypatch to make python error message less ugly
      info = item(re.findall(r'Errno -?\d+\] (.*)', info), '') or info.splitlines()[-1]
info = Style.RESET_ALL + Style.DIM + " (" + info.strip('<>') + ")" + Style.RESET_ALL
if msg: print(Back.RED + msg + info)
# show printer and status
  def discover(self, result):
    (ipaddr, (device, uptime, status, prstat)) = result
ipaddr = output().strfit(ipaddr, 15)
device = output().strfit(device, 27)
uptime = output().strfit(uptime, 8)
status = output().strfit(status, 23)
if device.strip() != 'device': device = Style.BRIGHT + device + Style.NORMAL
if prstat == '1': status = Back.GREEN + status + Back.BLUE # unknown
if prstat == '2': status = Back.GREEN + status + Back.BLUE # running
if prstat == '3': status = Back.YELLOW + status + Back.BLUE # warning
if prstat == '4': status = Back.GREEN + status + Back.BLUE # testing
if prstat == '5': status = Back.RED + status + Back.BLUE # down
line = (ipaddr, device, uptime, status)
output().info('%-15s %-27s %-8s %-23s' % line)
# recursively list files
def psfind(self, name):
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = Fore.YELLOW + const.SEP + re.sub("^(%.*%)", '', name) + Style.RESET_ALL
print("%s %s" % (vol, name))
# show directory listing
def psdir(self, isdir, size, mtime, name, otime):
otime = Style.DIM + "(created " + otime + ")" + Style.RESET_ALL
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = re.sub("^(%.*%)", '', name) # remove volume information from filename
name = Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL if isdir else name
if isdir: print("d %8s %s %s %s %s" % (size, mtime, otime, vol, name))
else: print("- %8s %s %s %s %s" % (size, mtime, otime, vol, name))
# show directory listing
def pjldir(self, name, size):
name = name if size else Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL
if size: print("- %8s %s" % (size, name))
else: print("d %8s %s" % ("-", name))
# show directory listing
def pcldir(self, size, mtime, id, name):
id = Style.DIM + "(macro id: " + id + ")" + Style.RESET_ALL
print("- %8s %s %s %s" % (size, mtime, id, name))
# show output from df
def df(self, args):
self.info("%-16s %-11s %-11s %-9s %-10s %-8s %-9s %-10s %-10s" % args)
# show fuzzing results
def fuzzed(self, path, cmd, opt):
opt1, opt2, opt3 = opt
if isinstance(opt1, bool): opt1 = (Back.GREEN + str(opt1) + Back.BLUE + " ")\
if opt1 else (Back.RED + str(opt1) + Back.BLUE + " ")
if isinstance(opt2, bool): opt2 = (Back.GREEN + str(opt2) + Back.BLUE + " ")\
if opt2 else (Back.RED + str(opt2) + Back.BLUE + " ")
if isinstance(opt3, bool): opt3 = (Back.GREEN + str(opt3) + Back.BLUE + " ")\
if opt3 else (Back.RED + str(opt3) + Back.BLUE + " ")
opt = opt1, opt2, opt3
self.info("%-35s %-12s %-7s %-7s %-7s" % ((path, cmd) + opt))
# show captured jobs
  def joblist(self, entry):
    (date, size, user, name, soft) = entry
user = output().strfit(user, 13)
name = output().strfit(name, 22)
soft = output().strfit(soft, 20)
line = (date, size, user, name, soft)
output().info('%-12s %5s %-13s %-22s %-20s' % line)
# show ascii only
def ascii(self, data):
data = re.sub(r"(\x00){10}", "\x00", data) # shorten nullbyte streams
data = re.sub(r"([^ -~])", ".", data) # replace non-printable chars
self.raw(data, "")
# show binary dump
def dump(self, data):
# experimental regex to match sensitive strings like passwords
data = re.sub(r"[\x00-\x06,\x1e]([!-~]{6,}?(?!\\0A))\x00{16}", "START" + r"\1" + "STOP", data)
data = re.sub(r"\00+", "\x00", data) # ignore nullbyte streams
data = re.sub(r"(\x00){10}", "\x00", data) # ignore nullbyte streams
data = re.sub(r"([\x00-\x1f,\x7f-\xff])", ".", data)
data = re.sub(r"START([!-~]{6,}?)STOP", Style.RESET_ALL + Back.BLUE + r"\1" + Style.RESET_ALL + Fore.YELLOW, data)
self.raw(data, "")
# dump ps dictionary
def psdict(self, data, indent=''):
    try: # legacy python2 workaround for non-ascii output
      importlib.reload(sys)
      sys.setdefaultencoding('UTF8')
    except AttributeError: # sys.setdefaultencoding() was removed in python3
      pass
# convert list to dictionary with indices as keys
if isinstance(data, list):
data = dict(enumerate(data))
# data now is expected to be a dictionary
if len(list(data.keys())) > 0: last = sorted(data.keys())[-1]
for key, val in sorted(data.items()):
type = val['type'].replace('type', '')
value = val['value']
perms = val['perms']
recursion = False
      # current entry is a dictionary
if isinstance(value, dict):
value, recursion = '', True
      # current entry is a ps array
if isinstance(value, list):
try: # array contains only atomic values
value = ' '.join(x['value'] for x in value)
except: # array contains further list or dict
# value = sum(val['value'], [])
value, recursion = '', True
# value = value.encode('ascii', errors='ignore')
node = '┬' if recursion else '─'
edge = indent + ('└' if key == last else '├')
# output current node in dictionary
print("%s%s %-3s %-11s %-30s %s" % (edge, node, perms, type, key, value))
      if recursion: # descend into the nested value
self.psdict(val['value'], indent + (' ' if key == last else '│'))
# show some information
def psonly(self):
self.chitchat("Info: This only affects jobs printed by a PostScript driver")
# countdown from sec to zero
def countdown(self, msg, sec, cmd):
try:
sys.stdout.write(msg)
for x in reversed(list(range(1, sec+1))):
sys.stdout.write(" " + str(x))
sys.stdout.flush()
time.sleep(1)
print(" KABOOM!")
return True
except KeyboardInterrupt:
print("")
# show horizontal line
def hline(self, len=72):
self.info("─" * len)
# crop/pad string to fixed length
def strfit(self, str, max):
str = str.strip() or "-"
if str.startswith('(') and str.endswith(')'): str = str[1:-1]
# crop long strings
if len(str) > max:
str = str[0:max-1] + "…"
# pad short strings
return str.ljust(max)
# ----------------------------------------------------------------------
class conv():
# return current time
def now(self):
return int(time.time())
# return time elapsed since unix epoch
def elapsed(self, date, div=1, short=False):
date = str(datetime.timedelta(seconds=int(date)/div))
return date.split(",")[0] if short else date
  # return date dependent on current year
def lsdate(self, date):
year1 = datetime.datetime.now().year
year2 = datetime.datetime.fromtimestamp(date).year
pdate = '%b %e ' if os.name == 'posix' else '%b %d '
format = pdate + "%H:%M" if year1 == year2 else pdate + " %Y"
return time.strftime(format, time.localtime(date))
# return date plus/minus given seconds
def timediff(self, seconds):
return self.lsdate(self.now() + self.int(seconds) / 1000)
  # convert size to human readable value
def filesize(self, num):
num = self.int(num)
for unit in ['B','K','M']:
if abs(num) < 1024.0:
return (("%4.1f%s" if unit == 'M' else "%4.0f%s") % (num, unit))
num /= 1024.0
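  # a quick sketch of the expected output (illustrative values only):
  #   conv().filesize(0)       -> '   0B'
  #   conv().filesize(2048)    -> '   2K'
  #   conv().filesize(5242880) -> ' 5.0M'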
# remove carriage return from line breaks
def nstrip(self, data):
return re.sub(r'\r\n', '\n', data)
  # convert string to hexadecimal
def hex(self, data, sep=''):
return sep.join("{:02x}".format(ord(c)) for c in data)
# convert to ascii character
def chr(self, num):
return chr(self.int(num))
# convert to integer or zero
def int(self, num):
try: n = int(num)
except ValueError: n = 0
return n
# ----------------------------------------------------------------------
class file():
# read from local file
def read(self, path):
try:
      with open(path, mode='rb') as f:
        data = f.read()
      return data
except IOError as e:
output().errmsg("Cannot read from file", e)
# write to local file
def write(self, path, data, m='wb'):
try:
      with open(path, mode=m) as f:
        f.write(data)
except IOError as e:
output().errmsg("Cannot write to file", e)
# append to local file
def append(self, path, data):
self.write(path, data, 'ab+')
# ----------------------------------------------------------------------
class conn(object):
# create debug connection object
def __init__(self, mode, debug, quiet):
self.mode = mode
self.debug = debug
self.quiet = quiet
self._file = None
self._sock = socket()
# open connection
def open(self, target, port=9100):
# target is a character device
if os.path.exists(target) \
and stat.S_ISCHR(os.stat(target).st_mode):
self._file = os.open(target, os.O_RDWR)
# treat target as ipv4 socket
else:
m = re.search('^(.+?):([0-9]+)$', target)
if m:
[target, port] = m.groups()
port = int(port)
self._sock.connect((target, port))
# close connection
def close(self, *arg):
# close file descriptor
if self._file: os.close(self._file)
# close inet socket
else: self._sock.close()
# set timeout
def timeout(self, *arg):
self._sock.settimeout(*arg)
# send data
def send(self, data):
if self.debug: output().send(self.beautify(data), self.debug)
# send data to device
if self._file: return os.write(self._file, data)
# send data to socket
elif self._sock: return self._sock.sendall(data.encode())
# receive data
def recv(self, bytes):
# receive data from device
if self._file: data = os.read(self._file, bytes).decode()
# receive data from socket
else: data = self._sock.recv(bytes).decode()
# output recv data when in debug mode
if self.debug: output().recv(self.beautify(data), self.debug)
return data
# so-many-seconds-passed bool condition
def past(self, seconds, watchdog):
return int(watchdog * 100) % (seconds * 100) == 0
# connection-feels-slow bool condition
def slow(self, limit, watchdog):
return not (self.quiet or self.debug) and watchdog > limit
# receive data until a delimiter is reached
def recv_until(self, delimiter, fb=True, crop=True, binary=False):
data = ""
sleep = 0.01 # pause in recv loop
limit = 3.0 # max watchdog overrun
wd = 0.0 # watchdog timeout counter
r = re.compile(delimiter, re.DOTALL)
s = re.compile("^\x04?\x0d?\x0a?" + delimiter, re.DOTALL)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
while not r.search(data):
data += self.recv(4096) # receive actual data
if self.past(limit, wd): wd_old, bytes = wd, len(data)
wd += sleep # workaround for endless loop w/o socket timeout
time.sleep(sleep) # happens on some devices - python socket error?
# timeout plus it seems we are not receiving data anymore
if wd > self._sock.gettimeout() and wd >= wd_old + limit:
if len(data) == bytes:
output().errmsg("Receiving data failed", "watchdog timeout")
break
# visual feedback on large/slow data transfers
if self.slow(limit, wd) and self.past(0.1, wd) and len(data) > 0:
output().chitchat(str(len(data)) + " bytes received\r", '')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# clear current line from 'so-many bytes received' chit-chat
if self.slow(limit, wd): output().chitchat(' ' * 24 + "\r", '')
# warn if feedback expected but response empty (= delimiter only)
# this also happens for data received out of order (e.g. brother)
if fb and s.search(data): output().chitchat("No data received.")
# remove delimiter itself from data
if crop: data = r.sub('', data)
# crop uel sequence at the beginning
data = re.sub(r'(^' + const.UEL + ')', '', data)
'''
┌─────────────────────────────────────────────────────────────────────────┐
│ delimiters -- note that carriage return (0d) is optional in ps/pjl │
├─────────────────────────┬─────────────────────────┬─────────────────────┤
│ │ PJL │ PostScript │
├─────────────────────────┼─────────┬───────────────┼────────┬────────────┤
│ │ send │ recv │ send │ recv │
├─────────────────────────┼─────────┼───────────────┼────────┼────────────┤
│ normal commands (ascii) │ 0d? 0a │ 0d+ 0a 0c 04? │ 0d? 0a │ 0d? 0a 04? │
├─────────────────────────┼─────────┼───────────────┼────────┼────────────┤
│ file transfers (binary) │ 0d? 0a │ 0c │ 0d? 0a │ - │
└─────────────────────────┴─────────┴───────────────┴────────┴────────────┘
'''
# crop end-of-transmission chars
if self.mode == 'ps':
data = re.sub(r'^\x04', '', data)
if not binary: data = re.sub(r'\x0d?\x0a\x04?$', '', data)
else: # pjl and pcl mode
if binary: data = re.sub(r'\x0c$', '', data)
else: data = re.sub(r'\x0d+\x0a\x0c\x04?$', '', data)
# crop whitespaces/newline as feedback
if not binary: data = data.strip()
return data
# beautify debug output
def beautify(self, data):
# remove sent/recv uel sequences
data = re.sub(r'' + const.UEL, '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if self.mode == 'ps':
# remove sent postscript header
data = re.sub(r'' + re.escape(const.PS_HEADER), '', data)
# remove sent postscript hack
data = re.sub(r'' + re.escape(const.PS_IOHACK), '', data)
# remove sent delimiter token
data = re.sub(r'\(DELIMITER\d+\\n\) print flush\n', '', data)
# remove recv delimiter token
data = re.sub(r'DELIMITER\d+', '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
elif self.mode == 'pjl':
# remove sent/recv delimiter token
data = re.sub(r'@PJL ECHO\s+DELIMITER\d+', '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
elif self.mode == 'pcl':
# remove sent delimiter token
data = re.sub(r'\x1b\*s-\d+X', '', data)
# remove recv delimiter token
data = re.sub(r'PCL\x0d?\x0a?\x0c?ECHO -\d+', '', data)
# replace sent escape sequences
data = re.sub(r'(' + const.ESC + ')', '<Esc>', data)
pass
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# replace lineseps in between
data = re.sub(r'\x0d?\x0a?\x0c', os.linesep, data)
# remove eot/eof sequences
data = data.strip(const.EOF)
return data
# ----------------------------------------------------------------------
class const(): # define constants
SEP = '/' # use posixoid path separator
EOL = '\r\n' # line feed || carriage return
ESC = '\x1b' # used to start escape sequences
UEL = ESC + '%-12345X' # universal exit language
EOF = EOL + '\x0c\x04' # potential end of file chars
DELIMITER = "DELIMITER" # delimiter marking end of repsonse
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  PS_CATCH = r'%%\[ (.*)\]%%'
  PS_ERROR = r'%%\[ Error: (.*)\]%%'
  PS_FLUSH = r'%%\[ Flushing: (.*)\]%%'
PS_PROMPT = '>' # TBD: could be derived from PS command 'prompt'
PS_HEADER = '@PJL ENTER LANGUAGE = POSTSCRIPT\n%!\n'
PS_GLOBAL = 'true 0 startjob pop\n' # 'serverdict begin 0 exitserver'
PS_SUPER = '\n1183615869 internaldict /superexec get exec'
PS_NOHOOK = '/nohook true def\n'
PS_IOHACK = '/print {(%stdout) (w) file dup 3 2 roll writestring flushfile} def\n'\
'/== {128 string cvs print (\\n) print} def\n'
PCL_HEADER = '@PJL ENTER LANGUAGE = PCL' + EOL + ESC
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SUPERBLOCK = '31337' # define super macro id to contain pclfs table
BLOCKRANGE = list(range(10000,20000)) # use those macros for file content
FILE_EXISTS = -1 # file size to be returned if file/dir size unknown
NONEXISTENT = -2 # file size to be returned if a file does not exist
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PS_VOL = '' # no default volume in ps (read: any, write: first)
  PJL_VOL = '0:' + SEP # default pjl volume name || path separator
| RUB-NDS/PRET | helper.py | Python | gpl-2.0 | 21,528 | 0.019595 |
import nacl.exceptions
import nacl.utils
import nacl.secret
from salty.config import encoder, Store
from salty.exceptions import NoValidKeyFound, DefaultKeyNotSet
__all__ = ['new', 'current', 'select', 'add_secret', 'get_secret', 'encrypt', 'decrypt']
def _new():
return encoder.encode(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE))
def _box(key):
assert type(key) is bytes
return nacl.secret.SecretBox(key, encoder=encoder)
def _encrypt(message, key=None):
assert type(message) is bytes
if key is None:
if store.current is None:
raise DefaultKeyNotSet
        key = bytes(store.current, 'utf8')
    return _box(key).encrypt(message, encoder=encoder)
def _decrypt(name, key=None):
assert type(name) is bytes
if key is None:
for k in store.keys:
dk = bytes(k, 'utf8')
try:
return _box(dk).decrypt(name, encoder=encoder)
except nacl.exceptions.CryptoError:
continue
raise NoValidKeyFound
return _box(key).decrypt(name, encoder=encoder)
store = Store(default_key=_new().decode())
# public api
def new():
return _new()
def current(key=None):
if key is None:
return store.current
store.add_key(bytes(key, 'utf8'), current=True)
return True
def select(pos):
store.set_current(pos)
return True
def add_secret(name, raw):
msg = _encrypt(bytes(raw, 'utf8'), bytes(store.current, 'utf8'))
store.add_secret(name, msg)
return True
def get_secret(name):
msg = store.get_secret(name)
return _decrypt(bytes(msg, 'utf8'))
def secrets():
return store.secrets
def decrypt(name, key=None):
key = key or current()
return _decrypt(name, key)
def encrypt(message, key=None):
key = key or current()
return _encrypt(message, key)
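# a minimal usage sketch, assuming the Store backend round-trips keys as
# text; key names and secret values below are purely illustrative:
#
#   import salty.api as salty
#   salty.current(salty.new().decode())   # install a fresh default key
#   salty.add_secret('db-password', 'hunter2')
#   salty.get_secret('db-password')       # -> b'hunter2'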
| Markcial/salty | salty/api.py | Python | mit | 1,918 | 0.000521 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
import translate.storage.base
import pootle_store.fields
import pootle.core.mixins.treeitem
import pootle.core.storage
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('pootle_translationproject', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('pootle_app', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Store',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('file', pootle_store.fields.TranslationStoreField(storage=pootle.core.storage.PootleFileSystemStorage(), upload_to=b'', max_length=255, editable=False, db_index=True)),
('pootle_path', models.CharField(max_length=255, verbose_name='Path', db_index=True)),
('name', models.CharField(max_length=128, editable=False)),
('file_mtime', models.DateTimeField(default=datetime.datetime(1, 1, 1, 0, 0, tzinfo=utc))),
('state', models.IntegerField(default=0, editable=False, db_index=True)),
('creation_time', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
('last_sync_revision', models.IntegerField(null=True, db_index=True)),
('obsolete', models.BooleanField(default=False)),
('parent', models.ForeignKey(related_name='child_stores', editable=False, to='pootle_app.Directory')),
('translation_project', models.ForeignKey(related_name='stores', editable=False, to='pootle_translationproject.TranslationProject')),
],
options={
'ordering': ['pootle_path'],
},
bases=(models.Model, pootle.core.mixins.treeitem.CachedTreeItem, translate.storage.base.TranslationStore),
),
migrations.RunSQL('ALTER TABLE `pootle_store_store` ROW_FORMAT=DYNAMIC'),
migrations.AlterField(
model_name='store',
name='pootle_path',
field=models.CharField(unique=True, max_length=255, verbose_name='Path', db_index=True)
),
migrations.CreateModel(
name='Unit',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('index', models.IntegerField(db_index=True)),
('unitid', models.TextField(editable=False)),
('unitid_hash', models.CharField(max_length=32, editable=False, db_index=True)),
('source_f', pootle_store.fields.MultiStringField(null=True)),
('source_hash', models.CharField(max_length=32, editable=False, db_index=True)),
('source_wordcount', models.SmallIntegerField(default=0, editable=False)),
('source_length', models.SmallIntegerField(default=0, editable=False, db_index=True)),
('target_f', pootle_store.fields.MultiStringField(null=True, blank=True)),
('target_wordcount', models.SmallIntegerField(default=0, editable=False)),
('target_length', models.SmallIntegerField(default=0, editable=False, db_index=True)),
('developer_comment', models.TextField(null=True, blank=True)),
('translator_comment', models.TextField(null=True, blank=True)),
('locations', models.TextField(null=True, editable=False)),
('context', models.TextField(null=True, editable=False)),
('state', models.IntegerField(default=0, db_index=True)),
('revision', models.IntegerField(default=0, db_index=True, blank=True)),
('creation_time', models.DateTimeField(db_index=True, auto_now_add=True, null=True)),
('mtime', models.DateTimeField(auto_now=True, auto_now_add=True, db_index=True)),
('submitted_on', models.DateTimeField(null=True, db_index=True)),
('commented_on', models.DateTimeField(null=True, db_index=True)),
('reviewed_on', models.DateTimeField(null=True, db_index=True)),
('commented_by', models.ForeignKey(related_name='commented', to=settings.AUTH_USER_MODEL, null=True)),
('reviewed_by', models.ForeignKey(related_name='reviewed', to=settings.AUTH_USER_MODEL, null=True)),
('store', models.ForeignKey(to='pootle_store.Store')),
('submitted_by', models.ForeignKey(related_name='submitted', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ['store', 'index'],
'get_latest_by': 'mtime',
},
bases=(models.Model, translate.storage.base.TranslationUnit),
),
migrations.CreateModel(
name='Suggestion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('target_f', pootle_store.fields.MultiStringField()),
('target_hash', models.CharField(max_length=32, db_index=True)),
('translator_comment_f', models.TextField(null=True, blank=True)),
('state', models.CharField(default=b'pending', max_length=16, db_index=True, choices=[(b'pending', 'Pending'), (b'accepted', 'Accepted'), (b'rejected', 'Rejected')])),
('creation_time', models.DateTimeField(null=True, db_index=True)),
('review_time', models.DateTimeField(null=True, db_index=True)),
('unit', models.ForeignKey(to='pootle_store.Unit')),
('reviewer', models.ForeignKey(related_name='reviews', to=settings.AUTH_USER_MODEL, null=True)),
('user', models.ForeignKey(related_name='suggestions', to=settings.AUTH_USER_MODEL, null=True)),
],
options={
},
bases=(models.Model, translate.storage.base.TranslationUnit),
),
migrations.CreateModel(
name='QualityCheck',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, db_index=True)),
('category', models.IntegerField(default=0)),
('message', models.TextField()),
('false_positive', models.BooleanField(default=False, db_index=True)),
('unit', models.ForeignKey(to='pootle_store.Unit')),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='unit',
unique_together=set([('store', 'unitid_hash')]),
),
migrations.AlterUniqueTogether(
name='store',
unique_together=set([('parent', 'name')]),
),
]
| Yelp/pootle | pootle/apps/pootle_store/migrations/0001_initial.py | Python | gpl-3.0 | 7,129 | 0.005611 |
from ert.cwrap import CWrapper, BaseCClass
from ert.enkf import ENKF_LIB
from ert.util import StringList
class SummaryKeyMatcher(BaseCClass):
def __init__(self):
c_ptr = SummaryKeyMatcher.cNamespace().alloc()
super(SummaryKeyMatcher, self).__init__(c_ptr)
def addSummaryKey(self, key):
assert isinstance(key, str)
return SummaryKeyMatcher.cNamespace().add_key(self, key)
def __len__(self):
return SummaryKeyMatcher.cNamespace().size(self)
def __contains__(self, key):
return SummaryKeyMatcher.cNamespace().match_key(self, key)
def isRequired(self, key):
""" @rtype: bool """
return SummaryKeyMatcher.cNamespace().is_required(self, key)
def keys(self):
""" @rtype: StringList """
return SummaryKeyMatcher.cNamespace().keys(self)
def free(self):
SummaryKeyMatcher.cNamespace().free(self)
cwrapper = CWrapper(ENKF_LIB)
cwrapper.registerObjectType("summary_key_matcher", SummaryKeyMatcher)
SummaryKeyMatcher.cNamespace().alloc = cwrapper.prototype("c_void_p summary_key_matcher_alloc()")
SummaryKeyMatcher.cNamespace().free = cwrapper.prototype("void summary_key_matcher_free(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().size = cwrapper.prototype("int summary_key_matcher_get_size(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().add_key = cwrapper.prototype("void summary_key_matcher_add_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().match_key = cwrapper.prototype("bool summary_key_matcher_match_summary_key(summary_key_matcher, char*)")
SummaryKeyMatcher.cNamespace().keys = cwrapper.prototype("stringlist_obj summary_key_matcher_get_keys(summary_key_matcher)")
SummaryKeyMatcher.cNamespace().is_required = cwrapper.prototype("bool summary_key_matcher_summary_key_is_required(summary_key_matcher, char*)")
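# a minimal usage sketch; assumes the compiled ENKF_LIB is loadable:
#
#   matcher = SummaryKeyMatcher()
#   matcher.addSummaryKey("FOPR")
#   "FOPR" in matcher   # -> True
#   len(matcher)        # -> 1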
| iLoop2/ResInsight | ThirdParty/Ert/devel/python/python/ert/enkf/summary_key_matcher.py | Python | gpl-3.0 | 1,882 | 0.007439 |
# -*- coding: utf-8 -*-
"""
The rrule module offers a small, complete, and very fast, implementation of
the recurrence rules documented in the
`iCalendar RFC <https://tools.ietf.org/html/rfc5545>`_,
including support for caching of results.
"""
import itertools
import datetime
import calendar
import re
import sys
try:
from math import gcd
except ImportError:
from fractions import gcd
from six import advance_iterator, integer_types
from six.moves import _thread, range
import heapq
from ._common import weekday as weekdaybase
from .tz import tzutc, tzlocal
# For warning about deprecation of until and count
from warnings import warn
__all__ = ["rrule", "rruleset", "rrulestr",
"YEARLY", "MONTHLY", "WEEKLY", "DAILY",
"HOURLY", "MINUTELY", "SECONDLY",
"MO", "TU", "WE", "TH", "FR", "SA", "SU"]
# Every mask is 7 days longer to handle cross-year weekly periods.
M366MASK = tuple([1]*31+[2]*29+[3]*31+[4]*30+[5]*31+[6]*30 +
[7]*31+[8]*31+[9]*30+[10]*31+[11]*30+[12]*31+[1]*7)
M365MASK = list(M366MASK)
M29, M30, M31 = list(range(1, 30)), list(range(1, 31)), list(range(1, 32))
MDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
MDAY365MASK = list(MDAY366MASK)
M29, M30, M31 = list(range(-29, 0)), list(range(-30, 0)), list(range(-31, 0))
NMDAY366MASK = tuple(M31+M29+M31+M30+M31+M30+M31+M31+M30+M31+M30+M31+M31[:7])
NMDAY365MASK = list(NMDAY366MASK)
M366RANGE = (0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366)
M365RANGE = (0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365)
WDAYMASK = [0, 1, 2, 3, 4, 5, 6]*55
del M29, M30, M31, M365MASK[59], MDAY365MASK[59], NMDAY365MASK[31]
MDAY365MASK = tuple(MDAY365MASK)
M365MASK = tuple(M365MASK)
FREQNAMES = ['YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY', 'HOURLY', 'MINUTELY', 'SECONDLY']
(YEARLY,
MONTHLY,
WEEKLY,
DAILY,
HOURLY,
MINUTELY,
SECONDLY) = list(range(7))
# Imported on demand.
easter = None
parser = None
class weekday(weekdaybase):
"""
This version of weekday does not allow n = 0.
"""
def __init__(self, wkday, n=None):
if n == 0:
raise ValueError("Can't create weekday with n==0")
super(weekday, self).__init__(wkday, n)
MO, TU, WE, TH, FR, SA, SU = weekdays = tuple(weekday(x) for x in range(7))
def _invalidates_cache(f):
"""
Decorator for rruleset methods which may invalidate the
cached length.
"""
def inner_func(self, *args, **kwargs):
rv = f(self, *args, **kwargs)
self._invalidate_cache()
return rv
return inner_func
class rrulebase(object):
def __init__(self, cache=False):
if cache:
self._cache = []
self._cache_lock = _thread.allocate_lock()
self._invalidate_cache()
else:
self._cache = None
self._cache_complete = False
self._len = None
def __iter__(self):
if self._cache_complete:
return iter(self._cache)
elif self._cache is None:
return self._iter()
else:
return self._iter_cached()
def _invalidate_cache(self):
if self._cache is not None:
self._cache = []
self._cache_complete = False
self._cache_gen = self._iter()
if self._cache_lock.locked():
self._cache_lock.release()
self._len = None
def _iter_cached(self):
i = 0
gen = self._cache_gen
cache = self._cache
acquire = self._cache_lock.acquire
release = self._cache_lock.release
while gen:
if i == len(cache):
acquire()
if self._cache_complete:
break
try:
for j in range(10):
cache.append(advance_iterator(gen))
except StopIteration:
self._cache_gen = gen = None
self._cache_complete = True
break
release()
yield cache[i]
i += 1
while i < self._len:
yield cache[i]
i += 1
def __getitem__(self, item):
if self._cache_complete:
return self._cache[item]
elif isinstance(item, slice):
if item.step and item.step < 0:
return list(iter(self))[item]
else:
return list(itertools.islice(self,
item.start or 0,
item.stop or sys.maxsize,
item.step or 1))
elif item >= 0:
gen = iter(self)
try:
for i in range(item+1):
res = advance_iterator(gen)
except StopIteration:
raise IndexError
return res
else:
return list(iter(self))[item]
def __contains__(self, item):
if self._cache_complete:
return item in self._cache
else:
for i in self:
if i == item:
return True
elif i > item:
return False
return False
    # __len__() introduces a large performance penalty.
def count(self):
""" Returns the number of recurrences in this set. It will have go
trough the whole recurrence, if this hasn't been done before. """
if self._len is None:
for x in self:
pass
return self._len
def before(self, dt, inc=False):
""" Returns the last recurrence before the given datetime instance. The
inc keyword defines what happens if dt is an occurrence. With
inc=True, if dt itself is an occurrence, it will be returned. """
if self._cache_complete:
gen = self._cache
else:
gen = self
last = None
if inc:
for i in gen:
if i > dt:
break
last = i
else:
for i in gen:
if i >= dt:
break
last = i
return last
def after(self, dt, inc=False):
""" Returns the first recurrence after the given datetime instance. The
inc keyword defines what happens if dt is an occurrence. With
inc=True, if dt itself is an occurrence, it will be returned. """
if self._cache_complete:
gen = self._cache
else:
gen = self
if inc:
for i in gen:
if i >= dt:
return i
else:
for i in gen:
if i > dt:
return i
return None
def xafter(self, dt, count=None, inc=False):
"""
Generator which yields up to `count` recurrences after the given
datetime instance, equivalent to `after`.
:param dt:
The datetime at which to start generating recurrences.
:param count:
The maximum number of recurrences to generate. If `None` (default),
dates are generated until the recurrence rule is exhausted.
:param inc:
If `dt` is an instance of the rule and `inc` is `True`, it is
included in the output.
:yields: Yields a sequence of `datetime` objects.
"""
if self._cache_complete:
gen = self._cache
else:
gen = self
# Select the comparison function
if inc:
comp = lambda dc, dtc: dc >= dtc
else:
comp = lambda dc, dtc: dc > dtc
# Generate dates
n = 0
for d in gen:
if comp(d, dt):
if count is not None:
n += 1
if n > count:
break
yield d
def between(self, after, before, inc=False, count=1):
""" Returns all the occurrences of the rrule between after and before.
The inc keyword defines what happens if after and/or before are
themselves occurrences. With inc=True, they will be included in the
list, if they are found in the recurrence set. """
if self._cache_complete:
gen = self._cache
else:
gen = self
started = False
l = []
if inc:
for i in gen:
if i > before:
break
elif not started:
if i >= after:
started = True
l.append(i)
else:
l.append(i)
else:
for i in gen:
if i >= before:
break
elif not started:
if i > after:
started = True
l.append(i)
else:
l.append(i)
return l
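# a short sketch of the navigation helpers above (dates are illustrative):
#
#   rule = rrule(DAILY, dtstart=datetime.datetime(2020, 1, 1), count=5)
#   rule.after(datetime.datetime(2020, 1, 2))    # -> 2020-01-03 00:00
#   rule.before(datetime.datetime(2020, 1, 2))   # -> 2020-01-01 00:00
#   rule.between(datetime.datetime(2020, 1, 1),
#                datetime.datetime(2020, 1, 4))  # -> [Jan 2nd, Jan 3rd]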
class rrule(rrulebase):
"""
That's the base of the rrule operation. It accepts all the keywords
defined in the RFC as its constructor parameters (except byday,
which was renamed to byweekday) and more. The constructor prototype is::
rrule(freq)
Where freq must be one of YEARLY, MONTHLY, WEEKLY, DAILY, HOURLY, MINUTELY,
or SECONDLY.
.. note::
Per RFC section 3.3.10, recurrence instances falling on invalid dates
and times are ignored rather than coerced:
Recurrence rules may generate recurrence instances with an invalid
date (e.g., February 30) or nonexistent local time (e.g., 1:30 AM
on a day where the local time is moved forward by an hour at 1:00
AM). Such recurrence instances MUST be ignored and MUST NOT be
counted as part of the recurrence set.
This can lead to possibly surprising behavior when, for example, the
start date occurs at the end of the month:
>>> from dateutil.rrule import rrule, MONTHLY
>>> from datetime import datetime
>>> start_date = datetime(2014, 12, 31)
>>> list(rrule(freq=MONTHLY, count=4, dtstart=start_date))
... # doctest: +NORMALIZE_WHITESPACE
[datetime.datetime(2014, 12, 31, 0, 0),
datetime.datetime(2015, 1, 31, 0, 0),
datetime.datetime(2015, 3, 31, 0, 0),
datetime.datetime(2015, 5, 31, 0, 0)]
Additionally, it supports the following keyword arguments:
:param cache:
If given, it must be a boolean value specifying to enable or disable
caching of results. If you will use the same rrule instance multiple
times, enabling caching will improve the performance considerably.
:param dtstart:
The recurrence start. Besides being the base for the recurrence,
missing parameters in the final recurrence instances will also be
extracted from this date. If not given, datetime.now() will be used
instead.
:param interval:
The interval between each freq iteration. For example, when using
YEARLY, an interval of 2 means once every two years, but with HOURLY,
it means once every two hours. The default interval is 1.
:param wkst:
The week start day. Must be one of the MO, TU, WE constants, or an
integer, specifying the first day of the week. This will affect
        recurrences based on weekly periods. The default week start is taken
from calendar.firstweekday(), and may be modified by
calendar.setfirstweekday().
:param count:
How many occurrences will be generated.
.. note::
As of version 2.5.0, the use of the ``until`` keyword together
with the ``count`` keyword is deprecated per RFC-5545 Sec. 3.3.10.
:param until:
If given, this must be a datetime instance, that will specify the
limit of the recurrence. The last recurrence in the rule is the greatest
datetime that is less than or equal to the value specified in the
``until`` parameter.
.. note::
As of version 2.5.0, the use of the ``until`` keyword together
with the ``count`` keyword is deprecated per RFC-5545 Sec. 3.3.10.
:param bysetpos:
If given, it must be either an integer, or a sequence of integers,
positive or negative. Each given integer will specify an occurrence
number, corresponding to the nth occurrence of the rule inside the
frequency period. For example, a bysetpos of -1 if combined with a
MONTHLY frequency, and a byweekday of (MO, TU, WE, TH, FR), will
result in the last work day of every month.
:param bymonth:
If given, it must be either an integer, or a sequence of integers,
meaning the months to apply the recurrence to.
:param bymonthday:
If given, it must be either an integer, or a sequence of integers,
meaning the month days to apply the recurrence to.
:param byyearday:
If given, it must be either an integer, or a sequence of integers,
meaning the year days to apply the recurrence to.
:param byweekno:
If given, it must be either an integer, or a sequence of integers,
meaning the week numbers to apply the recurrence to. Week numbers
have the meaning described in ISO8601, that is, the first week of
the year is that containing at least four days of the new year.
:param byweekday:
If given, it must be either an integer (0 == MO), a sequence of
integers, one of the weekday constants (MO, TU, etc), or a sequence
of these constants. When given, these variables will define the
weekdays where the recurrence will be applied. It's also possible to
use an argument n for the weekday instances, which will mean the nth
occurrence of this weekday in the period. For example, with MONTHLY,
or with YEARLY and BYMONTH, using FR(+1) in byweekday will specify the
first friday of the month where the recurrence happens. Notice that in
the RFC documentation, this is specified as BYDAY, but was renamed to
avoid the ambiguity of that keyword.
:param byhour:
If given, it must be either an integer, or a sequence of integers,
meaning the hours to apply the recurrence to.
:param byminute:
If given, it must be either an integer, or a sequence of integers,
meaning the minutes to apply the recurrence to.
:param bysecond:
If given, it must be either an integer, or a sequence of integers,
meaning the seconds to apply the recurrence to.
:param byeaster:
If given, it must be either an integer, or a sequence of integers,
positive or negative. Each integer will define an offset from the
Easter Sunday. Passing the offset 0 to byeaster will yield the Easter
Sunday itself. This is an extension to the RFC specification.
"""
def __init__(self, freq, dtstart=None,
interval=1, wkst=None, count=None, until=None, bysetpos=None,
bymonth=None, bymonthday=None, byyearday=None, byeaster=None,
byweekno=None, byweekday=None,
byhour=None, byminute=None, bysecond=None,
cache=False):
super(rrule, self).__init__(cache)
global easter
if not dtstart:
dtstart = datetime.datetime.now().replace(microsecond=0)
elif not isinstance(dtstart, datetime.datetime):
dtstart = datetime.datetime.fromordinal(dtstart.toordinal())
else:
dtstart = dtstart.replace(microsecond=0)
self._dtstart = dtstart
self._tzinfo = dtstart.tzinfo
self._freq = freq
self._interval = interval
self._count = count
# Cache the original byxxx rules, if they are provided, as the _byxxx
# attributes do not necessarily map to the inputs, and this can be
# a problem in generating the strings. Only store things if they've
# been supplied (the string retrieval will just use .get())
self._original_rule = {}
if until and not isinstance(until, datetime.datetime):
until = datetime.datetime.fromordinal(until.toordinal())
self._until = until
if self._dtstart and self._until:
if (self._dtstart.tzinfo is not None) != (self._until.tzinfo is not None):
# According to RFC5545 Section 3.3.10:
# https://tools.ietf.org/html/rfc5545#section-3.3.10
#
# > If the "DTSTART" property is specified as a date with UTC
# > time or a date with local time and time zone reference,
# > then the UNTIL rule part MUST be specified as a date with
# > UTC time.
raise ValueError(
'RRULE UNTIL values must be specified in UTC when DTSTART '
'is timezone-aware'
)
if count is not None and until:
warn("Using both 'count' and 'until' is inconsistent with RFC 5545"
" and has been deprecated in dateutil. Future versions will "
"raise an error.", DeprecationWarning)
if wkst is None:
self._wkst = calendar.firstweekday()
elif isinstance(wkst, integer_types):
self._wkst = wkst
else:
self._wkst = wkst.weekday
if bysetpos is None:
self._bysetpos = None
elif isinstance(bysetpos, integer_types):
if bysetpos == 0 or not (-366 <= bysetpos <= 366):
raise ValueError("bysetpos must be between 1 and 366, "
"or between -366 and -1")
self._bysetpos = (bysetpos,)
else:
self._bysetpos = tuple(bysetpos)
for pos in self._bysetpos:
if pos == 0 or not (-366 <= pos <= 366):
raise ValueError("bysetpos must be between 1 and 366, "
"or between -366 and -1")
if self._bysetpos:
self._original_rule['bysetpos'] = self._bysetpos
if (byweekno is None and byyearday is None and bymonthday is None and
byweekday is None and byeaster is None):
if freq == YEARLY:
if bymonth is None:
bymonth = dtstart.month
self._original_rule['bymonth'] = None
bymonthday = dtstart.day
self._original_rule['bymonthday'] = None
elif freq == MONTHLY:
bymonthday = dtstart.day
self._original_rule['bymonthday'] = None
elif freq == WEEKLY:
byweekday = dtstart.weekday()
self._original_rule['byweekday'] = None
# bymonth
if bymonth is None:
self._bymonth = None
else:
if isinstance(bymonth, integer_types):
bymonth = (bymonth,)
self._bymonth = tuple(sorted(set(bymonth)))
if 'bymonth' not in self._original_rule:
self._original_rule['bymonth'] = self._bymonth
# byyearday
if byyearday is None:
self._byyearday = None
else:
if isinstance(byyearday, integer_types):
byyearday = (byyearday,)
self._byyearday = tuple(sorted(set(byyearday)))
self._original_rule['byyearday'] = self._byyearday
# byeaster
if byeaster is not None:
if not easter:
from dateutil import easter
if isinstance(byeaster, integer_types):
self._byeaster = (byeaster,)
else:
self._byeaster = tuple(sorted(byeaster))
self._original_rule['byeaster'] = self._byeaster
else:
self._byeaster = None
# bymonthday
if bymonthday is None:
self._bymonthday = ()
self._bynmonthday = ()
else:
if isinstance(bymonthday, integer_types):
bymonthday = (bymonthday,)
bymonthday = set(bymonthday) # Ensure it's unique
self._bymonthday = tuple(sorted(x for x in bymonthday if x > 0))
self._bynmonthday = tuple(sorted(x for x in bymonthday if x < 0))
# Storing positive numbers first, then negative numbers
if 'bymonthday' not in self._original_rule:
self._original_rule['bymonthday'] = tuple(
itertools.chain(self._bymonthday, self._bynmonthday))
# byweekno
if byweekno is None:
self._byweekno = None
else:
if isinstance(byweekno, integer_types):
byweekno = (byweekno,)
self._byweekno = tuple(sorted(set(byweekno)))
self._original_rule['byweekno'] = self._byweekno
# byweekday / bynweekday
if byweekday is None:
self._byweekday = None
self._bynweekday = None
else:
# If it's one of the valid non-sequence types, convert to a
# single-element sequence before the iterator that builds the
# byweekday set.
if isinstance(byweekday, integer_types) or hasattr(byweekday, "n"):
byweekday = (byweekday,)
self._byweekday = set()
self._bynweekday = set()
for wday in byweekday:
if isinstance(wday, integer_types):
self._byweekday.add(wday)
elif not wday.n or freq > MONTHLY:
self._byweekday.add(wday.weekday)
else:
self._bynweekday.add((wday.weekday, wday.n))
if not self._byweekday:
self._byweekday = None
elif not self._bynweekday:
self._bynweekday = None
if self._byweekday is not None:
self._byweekday = tuple(sorted(self._byweekday))
orig_byweekday = [weekday(x) for x in self._byweekday]
else:
orig_byweekday = ()
if self._bynweekday is not None:
self._bynweekday = tuple(sorted(self._bynweekday))
orig_bynweekday = [weekday(*x) for x in self._bynweekday]
else:
orig_bynweekday = ()
if 'byweekday' not in self._original_rule:
self._original_rule['byweekday'] = tuple(itertools.chain(
orig_byweekday, orig_bynweekday))
# byhour
if byhour is None:
if freq < HOURLY:
self._byhour = {dtstart.hour}
else:
self._byhour = None
else:
if isinstance(byhour, integer_types):
byhour = (byhour,)
if freq == HOURLY:
self._byhour = self.__construct_byset(start=dtstart.hour,
byxxx=byhour,
base=24)
else:
self._byhour = set(byhour)
self._byhour = tuple(sorted(self._byhour))
self._original_rule['byhour'] = self._byhour
# byminute
if byminute is None:
if freq < MINUTELY:
self._byminute = {dtstart.minute}
else:
self._byminute = None
else:
if isinstance(byminute, integer_types):
byminute = (byminute,)
if freq == MINUTELY:
self._byminute = self.__construct_byset(start=dtstart.minute,
byxxx=byminute,
base=60)
else:
self._byminute = set(byminute)
self._byminute = tuple(sorted(self._byminute))
self._original_rule['byminute'] = self._byminute
# bysecond
if bysecond is None:
if freq < SECONDLY:
                self._bysecond = (dtstart.second,)
else:
self._bysecond = None
else:
if isinstance(bysecond, integer_types):
bysecond = (bysecond,)
            self._bysecond = set(bysecond)
            if freq == SECONDLY:
                self._bysecond = self.__construct_byset(start=dtstart.second,
                                                        byxxx=bysecond,
                                                        base=60)
self._bysecond = tuple(sorted(self._bysecond))
self._original_rule['bysecond'] = self._bysecond
if self._freq >= HOURLY:
self._timeset = None
else:
self._timeset = []
for hour in self._byhour:
for minute in self._byminute:
for second in self._bysecond:
self._timeset.append(
datetime.time(hour, minute, second,
tzinfo=self._tzinfo))
self._timeset.sort()
self._timeset = tuple(self._timeset)
def __str__(self):
"""
Output a string that would generate this RRULE if passed to rrulestr.
This is mostly compatible with RFC5545, except for the
dateutil-specific extension BYEASTER.
"""
output = []
h, m, s = [None] * 3
if self._dtstart:
output.append(self._dtstart.strftime('DTSTART:%Y%m%dT%H%M%S'))
h, m, s = self._dtstart.timetuple()[3:6]
parts = ['FREQ=' + FREQNAMES[self._freq]]
if self._interval != 1:
parts.append('INTERVAL=' + str(self._interval))
if self._wkst:
parts.append('WKST=' + repr(weekday(self._wkst))[0:2])
if self._count is not None:
parts.append('COUNT=' + str(self._count))
if self._until:
parts.append(self._until.strftime('UNTIL=%Y%m%dT%H%M%S'))
if self._original_rule.get('byweekday') is not None:
# The str() method on weekday objects doesn't generate
# RFC5545-compliant strings, so we should modify that.
original_rule = dict(self._original_rule)
wday_strings = []
for wday in original_rule['byweekday']:
if wday.n:
wday_strings.append('{n:+d}{wday}'.format(
n=wday.n,
wday=repr(wday)[0:2]))
else:
wday_strings.append(repr(wday))
original_rule['byweekday'] = wday_strings
else:
original_rule = self._original_rule
partfmt = '{name}={vals}'
for name, key in [('BYSETPOS', 'bysetpos'),
('BYMONTH', 'bymonth'),
('BYMONTHDAY', 'bymonthday'),
('BYYEARDAY', 'byyearday'),
('BYWEEKNO', 'byweekno'),
('BYDAY', 'byweekday'),
('BYHOUR', 'byhour'),
('BYMINUTE', 'byminute'),
('BYSECOND', 'bysecond'),
('BYEASTER', 'byeaster')]:
value = original_rule.get(key)
if value:
parts.append(partfmt.format(name=name, vals=(','.join(str(v)
for v in value))))
output.append('RRULE:' + ';'.join(parts))
return '\n'.join(output)
def replace(self, **kwargs):
"""Return new rrule with same attributes except for those attributes given new
values by whichever keyword arguments are specified."""
new_kwargs = {"interval": self._interval,
"count": self._count,
"dtstart": self._dtstart,
"freq": self._freq,
"until": self._until,
"wkst": self._wkst,
"cache": False if self._cache is None else True }
new_kwargs.update(self._original_rule)
new_kwargs.update(kwargs)
return rrule(**new_kwargs)
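    # a brief sketch of replace() (values are illustrative):
    #
    #   rule = rrule(WEEKLY, dtstart=datetime.datetime(2020, 1, 6))
    #   daily = rule.replace(freq=DAILY)   # same dtstart, daily frequency
    #   capped = rule.replace(count=3)     # same rule, limited to 3 dates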
def _iter(self):
year, month, day, hour, minute, second, weekday, yearday, _ = \
self._dtstart.timetuple()
# Some local variables to speed things up a bit
freq = self._freq
interval = self._interval
wkst = self._wkst
until = self._until
bymonth = self._bymonth
byweekno = self._byweekno
byyearday = self._byyearday
byweekday = self._byweekday
byeaster = self._byeaster
bymonthday = self._bymonthday
bynmonthday = self._bynmonthday
bysetpos = self._bysetpos
byhour = self._byhour
byminute = self._byminute
bysecond = self._bysecond
ii = _iterinfo(self)
ii.rebuild(year, month)
getdayset = {YEARLY: ii.ydayset,
MONTHLY: ii.mdayset,
WEEKLY: ii.wdayset,
DAILY: ii.ddayset,
HOURLY: ii.ddayset,
MINUTELY: ii.ddayset,
SECONDLY: ii.ddayset}[freq]
if freq < HOURLY:
timeset = self._timeset
else:
gettimeset = {HOURLY: ii.htimeset,
MINUTELY: ii.mtimeset,
SECONDLY: ii.stimeset}[freq]
if ((freq >= HOURLY and
self._byhour and hour not in self._byhour) or
(freq >= MINUTELY and
self._byminute and minute not in self._byminute) or
(freq >= SECONDLY and
self._bysecond and second not in self._bysecond)):
timeset = ()
else:
timeset = gettimeset(hour, minute, second)
total = 0
count = self._count
while True:
# Get dayset with the right frequency
dayset, start, end = getdayset(year, month, day)
# Do the "hard" work ;-)
filtered = False
for i in dayset[start:end]:
if ((bymonth and ii.mmask[i] not in bymonth) or
(byweekno and not ii.wnomask[i]) or
(byweekday and ii.wdaymask[i] not in byweekday) or
(ii.nwdaymask and not ii.nwdaymask[i]) or
(byeaster and not ii.eastermask[i]) or
((bymonthday or bynmonthday) and
ii.mdaymask[i] not in bymonthday and
ii.nmdaymask[i] not in bynmonthday) or
(byyearday and
((i < ii.yearlen and i+1 not in byyearday and
-ii.yearlen+i not in byyearday) or
(i >= ii.yearlen and i+1-ii.yearlen not in byyearday and
-ii.nextyearlen+i-ii.yearlen not in byyearday)))):
dayset[i] = None
filtered = True
# Output results
if bysetpos and timeset:
poslist = []
for pos in bysetpos:
if pos < 0:
daypos, timepos = divmod(pos, len(timeset))
else:
daypos, timepos = divmod(pos-1, len(timeset))
try:
i = [x for x in dayset[start:end]
if x is not None][daypos]
time = timeset[timepos]
except IndexError:
pass
else:
date = datetime.date.fromordinal(ii.yearordinal+i)
res = datetime.datetime.combine(date, time)
if res not in poslist:
poslist.append(res)
poslist.sort()
for res in poslist:
if until and res > until:
self._len = total
return
elif res >= self._dtstart:
if count is not None:
count -= 1
if count < 0:
self._len = total
return
total += 1
yield res
else:
for i in dayset[start:end]:
if i is not None:
date = datetime.date.fromordinal(ii.yearordinal + i)
for time in timeset:
res = datetime.datetime.combine(date, time)
if until and res > until:
self._len = total
return
elif res >= self._dtstart:
if count is not None:
count -= 1
if count < 0:
self._len = total
return
total += 1
yield res
# Handle frequency and interval
fixday = False
if freq == YEARLY:
year += interval
if year > datetime.MAXYEAR:
self._len = total
return
ii.rebuild(year, month)
elif freq == MONTHLY:
month += interval
if month > 12:
div, mod = divmod(month, 12)
month = mod
year += div
if month == 0:
month = 12
year -= 1
if year > datetime.MAXYEAR:
self._len = total
return
ii.rebuild(year, month)
elif freq == WEEKLY:
if wkst > weekday:
day += -(weekday+1+(6-wkst))+self._interval*7
else:
day += -(weekday-wkst)+self._interval*7
weekday = wkst
fixday = True
elif freq == DAILY:
day += interval
fixday = True
elif freq == HOURLY:
if filtered:
# Jump to one iteration before next day
hour += ((23-hour)//interval)*interval
if byhour:
ndays, hour = self.__mod_distance(value=hour,
byxxx=self._byhour,
base=24)
else:
ndays, hour = divmod(hour+interval, 24)
if ndays:
day += ndays
fixday = True
timeset = gettimeset(hour, minute, second)
elif freq == MINUTELY:
if filtered:
# Jump to one iteration before next day
minute += ((1439-(hour*60+minute))//interval)*interval
valid = False
rep_rate = (24*60)
for j in range(rep_rate // gcd(interval, rep_rate)):
if byminute:
nhours, minute = \
self.__mod_distance(value=minute,
byxxx=self._byminute,
base=60)
else:
nhours, minute = divmod(minute+interval, 60)
div, hour = divmod(hour+nhours, 24)
if div:
day += div
fixday = True
filtered = False
if not byhour or hour in byhour:
valid = True
break
if not valid:
raise ValueError('Invalid combination of interval and ' +
'byhour resulting in empty rule.')
timeset = gettimeset(hour, minute, second)
elif freq == SECONDLY:
if filtered:
# Jump to one iteration before next day
second += (((86399 - (hour * 3600 + minute * 60 + second))
// interval) * interval)
rep_rate = (24 * 3600)
valid = False
for j in range(0, rep_rate // gcd(interval, rep_rate)):
if bysecond:
nminutes, second = \
self.__mod_distance(value=second,
byxxx=self._bysecond,
base=60)
else:
nminutes, second = divmod(second+interval, 60)
div, minute = divmod(minute+nminutes, 60)
if div:
hour += div
div, hour = divmod(hour, 24)
if div:
day += div
fixday = True
if ((not byhour or hour in byhour) and
(not byminute or minute in byminute) and
(not bysecond or second in bysecond)):
valid = True
break
if not valid:
raise ValueError('Invalid combination of interval, ' +
'byhour and byminute resulting in empty' +
' rule.')
timeset = gettimeset(hour, minute, second)
if fixday and day > 28:
daysinmonth = calendar.monthrange(year, month)[1]
if day > daysinmonth:
while day > daysinmonth:
day -= daysinmonth
month += 1
if month == 13:
month = 1
year += 1
if year > datetime.MAXYEAR:
self._len = total
return
daysinmonth = calendar.monthrange(year, month)[1]
ii.rebuild(year, month)
def __construct_byset(self, start, byxxx, base):
"""
If a `BYXXX` sequence is passed to the constructor at the same level as
`FREQ` (e.g. `FREQ=HOURLY,BYHOUR={2,4,7},INTERVAL=3`), there are some
specifications which cannot be reached given some starting conditions.
        This occurs whenever the interval is not coprime with the base of a
        given unit and the difference between the starting position and the
        ending position is not divisible by the greatest common divisor of
        the interval and the base. For example, with a FREQ of hourly
starting at 17:00 and an interval of 4, the only valid values for
BYHOUR would be {21, 1, 5, 9, 13, 17}, because 4 and 24 are not
coprime.
:param start:
Specifies the starting position.
:param byxxx:
An iterable containing the list of allowed values.
:param base:
The largest allowable value for the specified frequency (e.g.
24 hours, 60 minutes).
        This does not preserve the type of the iterable, returning a set,
        since the values should be unique and the order is irrelevant; this
        will also speed up later lookups.
In the event of an empty set, raises a :exception:`ValueError`, as this
results in an empty rrule.
"""
cset = set()
# Support a single byxxx value.
if isinstance(byxxx, integer_types):
byxxx = (byxxx, )
for num in byxxx:
i_gcd = gcd(self._interval, base)
# Use divmod rather than % because we need to wrap negative nums.
if i_gcd == 1 or divmod(num - start, i_gcd)[1] == 0:
cset.add(num)
if len(cset) == 0:
raise ValueError("Invalid rrule byxxx generates an empty set.")
return cset
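    # Illustrative example (hourly frequency assumed): with start=17,
    # interval=4 and base=24, gcd(4, 24) == 4, so only values whose
    # difference from 17 is divisible by 4 survive; passing byxxx=range(24)
    # would yield the set {1, 5, 9, 13, 17, 21}.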
def __mod_distance(self, value, byxxx, base):
"""
Calculates the next value in a sequence where the `FREQ` parameter is
specified along with a `BYXXX` parameter at the same "level"
(e.g. `HOURLY` specified with `BYHOUR`).
:param value:
The old value of the component.
:param byxxx:
The `BYXXX` set, which should have been generated by
`rrule._construct_byset`, or something else which checks that a
valid rule is present.
:param base:
The largest allowable value for the specified frequency (e.g.
24 hours, 60 minutes).
If a valid value is not found after `base` iterations (the maximum
number before the sequence would start to repeat), this raises a
:exception:`ValueError`, as no valid values were found.
This returns a tuple of `divmod(n*interval, base)`, where `n` is the
smallest number of `interval` repetitions until the next specified
value in `byxxx` is found.
"""
accumulator = 0
for ii in range(1, base + 1):
# Using divmod() over % to account for negative intervals
div, value = divmod(value + self._interval, base)
accumulator += div
if value in byxxx:
return (accumulator, value)
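    # Illustrative example: with self._interval == 4, base == 24 and
    # byxxx == {6}, starting from value == 22 the sequence visits 2
    # (wrapping once past 24) and then 6, so the call returns (1, 6):
    # one carry into the next larger unit, landing on hour 6.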
class _iterinfo(object):
__slots__ = ["rrule", "lastyear", "lastmonth",
"yearlen", "nextyearlen", "yearordinal", "yearweekday",
"mmask", "mrange", "mdaymask", "nmdaymask",
"wdaymask", "wnomask", "nwdaymask", "eastermask"]
def __init__(self, rrule):
for attr in self.__slots__:
setattr(self, attr, None)
self.rrule = rrule
def rebuild(self, year, month):
# Every mask is 7 days longer to handle cross-year weekly periods.
rr = self.rrule
if year != self.lastyear:
self.yearlen = 365 + calendar.isleap(year)
self.nextyearlen = 365 + calendar.isleap(year + 1)
firstyday = datetime.date(year, 1, 1)
self.yearordinal = firstyday.toordinal()
self.yearweekday = firstyday.weekday()
wday = datetime.date(year, 1, 1).weekday()
if self.yearlen == 365:
self.mmask = M365MASK
self.mdaymask = MDAY365MASK
self.nmdaymask = NMDAY365MASK
self.wdaymask = WDAYMASK[wday:]
self.mrange = M365RANGE
else:
self.mmask = M366MASK
self.mdaymask = MDAY366MASK
self.nmdaymask = NMDAY366MASK
self.wdaymask = WDAYMASK[wday:]
self.mrange = M366RANGE
if not rr._byweekno:
self.wnomask = None
else:
self.wnomask = [0]*(self.yearlen+7)
# no1wkst = firstwkst = self.wdaymask.index(rr._wkst)
no1wkst = firstwkst = (7-self.yearweekday+rr._wkst) % 7
if no1wkst >= 4:
no1wkst = 0
# Number of days in the year, plus the days we got
# from last year.
wyearlen = self.yearlen+(self.yearweekday-rr._wkst) % 7
else:
# Number of days in the year, minus the days we
# left in last year.
wyearlen = self.yearlen-no1wkst
div, mod = divmod(wyearlen, 7)
numweeks = div+mod//4
for n in rr._byweekno:
if n < 0:
n += numweeks+1
if not (0 < n <= numweeks):
continue
if n > 1:
i = no1wkst+(n-1)*7
if no1wkst != firstwkst:
i -= 7-firstwkst
else:
i = no1wkst
for j in range(7):
self.wnomask[i] = 1
i += 1
if self.wdaymask[i] == rr._wkst:
break
if 1 in rr._byweekno:
# Check week number 1 of next year as well
# TODO: Check -numweeks for next year.
i = no1wkst+numweeks*7
if no1wkst != firstwkst:
i -= 7-firstwkst
if i < self.yearlen:
# If week starts in next year, we
# don't care about it.
for j in range(7):
self.wnomask[i] = 1
i += 1
if self.wdaymask[i] == rr._wkst:
break
if no1wkst:
# Check last week number of last year as
# well. If no1wkst is 0, either the year
# started on week start, or week number 1
# got days from last year, so there are no
# days from last year's last week number in
# this year.
if -1 not in rr._byweekno:
lyearweekday = datetime.date(year-1, 1, 1).weekday()
lno1wkst = (7-lyearweekday+rr._wkst) % 7
lyearlen = 365+calendar.isleap(year-1)
if lno1wkst >= 4:
lno1wkst = 0
lnumweeks = 52+(lyearlen +
(lyearweekday-rr._wkst) % 7) % 7//4
else:
lnumweeks = 52+(self.yearlen-no1wkst) % 7//4
else:
lnumweeks = -1
if lnumweeks in rr._byweekno:
for i in range(no1wkst):
self.wnomask[i] = 1
if (rr._bynweekday and (month != self.lastmonth or
year != self.lastyear)):
ranges = []
if rr._freq == YEARLY:
if rr._bymonth:
for month in rr._bymonth:
ranges.append(self.mrange[month-1:month+1])
else:
ranges = [(0, self.yearlen)]
elif rr._freq == MONTHLY:
ranges = [self.mrange[month-1:month+1]]
if ranges:
# Weekly frequency won't get here, so we may not
# care about cross-year weekly periods.
self.nwdaymask = [0]*self.yearlen
for first, last in ranges:
last -= 1
for wday, n in rr._bynweekday:
if n < 0:
i = last+(n+1)*7
i -= (self.wdaymask[i]-wday) % 7
else:
i = first+(n-1)*7
i += (7-self.wdaymask[i]+wday) % 7
if first <= i <= last:
self.nwdaymask[i] = 1
if rr._byeaster:
self.eastermask = [0]*(self.yearlen+7)
eyday = easter.easter(year).toordinal()-self.yearordinal
for offset in rr._byeaster:
self.eastermask[eyday+offset] = 1
self.lastyear = year
self.lastmonth = month
def ydayset(self, year, month, day):
return list(range(self.yearlen)), 0, self.yearlen
def mdayset(self, year, month, day):
dset = [None]*self.yearlen
start, end = self.mrange[month-1:month+1]
for i in range(start, end):
dset[i] = i
return dset, start, end
def wdayset(self, year, month, day):
# We need to handle cross-year weeks here.
dset = [None]*(self.yearlen+7)
i = datetime.date(year, month, day).toordinal()-self.yearordinal
start = i
for j in range(7):
dset[i] = i
i += 1
# if (not (0 <= i < self.yearlen) or
# self.wdaymask[i] == self.rrule._wkst):
# This will cross the year boundary, if necessary.
if self.wdaymask[i] == self.rrule._wkst:
break
return dset, start, i
def ddayset(self, year, month, day):
dset = [None] * self.yearlen
i = datetime.date(year, month, day).toordinal() - self.yearordinal
dset[i] = i
return dset, i, i + 1
def htimeset(self, hour, minute, second):
tset = []
rr = self.rrule
for minute in rr._byminute:
for second in rr._bysecond:
tset.append(datetime.time(hour, minute, second,
tzinfo=rr._tzinfo))
tset.sort()
return tset
def mtimeset(self, hour, minute, second):
tset = []
rr = self.rrule
for second in rr._bysecond:
tset.append(datetime.time(hour, minute, second, tzinfo=rr._tzinfo))
tset.sort()
return tset
def stimeset(self, hour, minute, second):
return (datetime.time(hour, minute, second,
tzinfo=self.rrule._tzinfo),)
class rruleset(rrulebase):
""" The rruleset type allows more complex recurrence setups, mixing
multiple rules, dates, exclusion rules, and exclusion dates. The type
constructor takes the following keyword arguments:
:param cache: If True, caching of results will be enabled, improving
performance of multiple queries considerably. """
class _genitem(object):
def __init__(self, genlist, gen):
try:
self.dt = advance_iterator(gen)
genlist.append(self)
except StopIteration:
pass
self.genlist = genlist
self.gen = gen
def __next__(self):
try:
self.dt = advance_iterator(self.gen)
except StopIteration:
if self.genlist[0] is self:
heapq.heappop(self.genlist)
else:
self.genlist.remove(self)
heapq.heapify(self.genlist)
next = __next__
def __lt__(self, other):
return self.dt < other.dt
def __gt__(self, other):
return self.dt > other.dt
def __eq__(self, other):
return self.dt == other.dt
def __ne__(self, other):
return self.dt != other.dt
def __init__(self, cache=False):
super(rruleset, self).__init__(cache)
self._rrule = []
self._rdate = []
self._exrule = []
self._exdate = []
@_invalidates_cache
def rrule(self, rrule):
""" Include the given :py:class:`rrule` instance in the recurrence set
generation. """
self._rrule.append(rrule)
@_invalidates_cache
def rdate(self, rdate):
""" Include the given :py:class:`datetime` instance in the recurrence
set generation. """
self._rdate.append(rdate)
@_invalidates_cache
def exrule(self, exrule):
""" Include the given rrule instance in the recurrence set exclusion
list. Dates which are part of the given recurrence rules will not
be generated, even if some inclusive rrule or rdate matches them.
"""
self._exrule.append(exrule)
@_invalidates_cache
def exdate(self, exdate):
""" Include the given datetime instance in the recurrence set
exclusion list. Dates included that way will not be generated,
even if some inclusive rrule or rdate matches them. """
self._exdate.append(exdate)
def _iter(self):
rlist = []
self._rdate.sort()
self._genitem(rlist, iter(self._rdate))
for gen in [iter(x) for x in self._rrule]:
self._genitem(rlist, gen)
exlist = []
self._exdate.sort()
self._genitem(exlist, iter(self._exdate))
for gen in [iter(x) for x in self._exrule]:
self._genitem(exlist, gen)
lastdt = None
total = 0
heapq.heapify(rlist)
heapq.heapify(exlist)
while rlist:
ritem = rlist[0]
if not lastdt or lastdt != ritem.dt:
while exlist and exlist[0] < ritem:
exitem = exlist[0]
advance_iterator(exitem)
if exlist and exlist[0] is exitem:
heapq.heapreplace(exlist, exitem)
if not exlist or ritem != exlist[0]:
total += 1
yield ritem.dt
lastdt = ritem.dt
advance_iterator(ritem)
if rlist and rlist[0] is ritem:
heapq.heapreplace(rlist, ritem)
self._len = total
class _rrulestr(object):
_freq_map = {"YEARLY": YEARLY,
"MONTHLY": MONTHLY,
"WEEKLY": WEEKLY,
"DAILY": DAILY,
"HOURLY": HOURLY,
"MINUTELY": MINUTELY,
"SECONDLY": SECONDLY}
_weekday_map = {"MO": 0, "TU": 1, "WE": 2, "TH": 3,
"FR": 4, "SA": 5, "SU": 6}
def _handle_int(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = int(value)
def _handle_int_list(self, rrkwargs, name, value, **kwargs):
rrkwargs[name.lower()] = [int(x) for x in value.split(',')]
_handle_INTERVAL = _handle_int
_handle_COUNT = _handle_int
_handle_BYSETPOS = _handle_int_list
_handle_BYMONTH = _handle_int_list
_handle_BYMONTHDAY = _handle_int_list
_handle_BYYEARDAY = _handle_int_list
_handle_BYEASTER = _handle_int_list
_handle_BYWEEKNO = _handle_int_list
_handle_BYHOUR = _handle_int_list
_handle_BYMINUTE = _handle_int_list
_handle_BYSECOND = _handle_int_list
def _handle_FREQ(self, rrkwargs, name, value, **kwargs):
rrkwargs["freq"] = self._freq_map[value]
def _handle_UNTIL(self, rrkwargs, name, value, **kwargs):
global parser
if not parser:
from dateutil import parser
try:
rrkwargs["until"] = parser.parse(value,
ignoretz=kwargs.get("ignoretz"),
tzinfos=kwargs.get("tzinfos"))
except ValueError:
raise ValueError("invalid until date")
def _handle_WKST(self, rrkwargs, name, value, **kwargs):
rrkwargs["wkst"] = self._weekday_map[value]
def _handle_BYWEEKDAY(self, rrkwargs, name, value, **kwargs):
"""
Two ways to specify this: +1MO or MO(+1)
"""
l = []
for wday in value.split(','):
if '(' in wday:
# If it's of the form TH(+1), etc.
splt = wday.split('(')
w = splt[0]
n = int(splt[1][:-1])
elif len(wday):
# If it's of the form +1MO
for i in range(len(wday)):
if wday[i] not in '+-0123456789':
break
n = wday[:i] or None
w = wday[i:]
if n:
n = int(n)
else:
raise ValueError("Invalid (empty) BYDAY specification.")
l.append(weekdays[self._weekday_map[w]](n))
rrkwargs["byweekday"] = l
_handle_BYDAY = _handle_BYWEEKDAY
def _parse_rfc_rrule(self, line,
dtstart=None,
cache=False,
ignoretz=False,
tzinfos=None):
if line.find(':') != -1:
name, value = line.split(':')
if name != "RRULE":
raise ValueError("unknown parameter name")
else:
value = line
rrkwargs = {}
for pair in value.split(';'):
name, value = pair.split('=')
name = name.upper()
value = value.upper()
try:
getattr(self, "_handle_"+name)(rrkwargs, name, value,
ignoretz=ignoretz,
tzinfos=tzinfos)
except AttributeError:
raise ValueError("unknown parameter '%s'" % name)
except (KeyError, ValueError):
raise ValueError("invalid '%s': %s" % (name, value))
return rrule(dtstart=dtstart, cache=cache, **rrkwargs)
def _parse_rfc(self, s,
dtstart=None,
cache=False,
unfold=False,
forceset=False,
compatible=False,
ignoretz=False,
tzids=None,
tzinfos=None):
global parser
if compatible:
forceset = True
unfold = True
TZID_NAMES = dict(map(
lambda x: (x.upper(), x),
re.findall('TZID=(?P<name>[^:]+):', s)
))
s = s.upper()
if not s.strip():
raise ValueError("empty string")
if unfold:
lines = s.splitlines()
i = 0
while i < len(lines):
line = lines[i].rstrip()
if not line:
del lines[i]
elif i > 0 and line[0] == " ":
lines[i-1] += line[1:]
del lines[i]
else:
i += 1
else:
lines = s.split()
if (not forceset and len(lines) == 1 and (s.find(':') == -1 or
s.startswith('RRULE:'))):
return self._parse_rfc_rrule(lines[0], cache=cache,
dtstart=dtstart, ignoretz=ignoretz,
tzinfos=tzinfos)
else:
rrulevals = []
rdatevals = []
exrulevals = []
exdatevals = []
for line in lines:
if not line:
continue
if line.find(':') == -1:
name = "RRULE"
value = line
else:
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
raise ValueError("empty property name")
name = parms[0]
parms = parms[1:]
if name == "RRULE":
for parm in parms:
raise ValueError("unsupported RRULE parm: "+parm)
rrulevals.append(value)
elif name == "RDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError("unsupported RDATE parm: "+parm)
rdatevals.append(value)
elif name == "EXRULE":
for parm in parms:
raise ValueError("unsupported EXRULE parm: "+parm)
exrulevals.append(value)
elif name == "EXDATE":
for parm in parms:
if parm != "VALUE=DATE-TIME":
raise ValueError("unsupported EXDATE parm: "+parm)
exdatevals.append(value)
elif name == "DTSTART":
                    # RFC 5545 3.8.2.4: The VALUE parameter is optional, but
# may be found only once.
value_found = False
TZID = None
valid_values = {"VALUE=DATE-TIME", "VALUE=DATE"}
for parm in parms:
if parm.startswith("TZID="):
try:
tzkey = TZID_NAMES[parm.split('TZID=')[-1]]
except KeyError:
continue
if tzids is None:
from . import tz
tzlookup = tz.gettz
elif callable(tzids):
tzlookup = tzids
else:
tzlookup = getattr(tzids, 'get', None)
if tzlookup is None:
msg = ('tzids must be a callable, ' +
'mapping, or None, ' +
'not %s' % tzids)
raise ValueError(msg)
TZID = tzlookup(tzkey)
continue
if parm not in valid_values:
raise ValueError("unsupported DTSTART parm: "+parm)
else:
if value_found:
msg = ("Duplicate value parameter found in " +
"DTSTART: " + parm)
raise ValueError(msg)
value_found = True
if not parser:
from dateutil import parser
dtstart = parser.parse(value, ignoretz=ignoretz,
tzinfos=tzinfos)
if TZID is not None:
if dtstart.tzinfo is None:
dtstart = dtstart.replace(tzinfo=TZID)
else:
raise ValueError('DTSTART specifies multiple timezones')
else:
raise ValueError("unsupported property: "+name)
if (forceset or len(rrulevals) > 1 or rdatevals
or exrulevals or exdatevals):
if not parser and (rdatevals or exdatevals):
from dateutil import parser
rset = rruleset(cache=cache)
for value in rrulevals:
rset.rrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in rdatevals:
for datestr in value.split(','):
rset.rdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exrulevals:
rset.exrule(self._parse_rfc_rrule(value, dtstart=dtstart,
ignoretz=ignoretz,
tzinfos=tzinfos))
for value in exdatevals:
for datestr in value.split(','):
rset.exdate(parser.parse(datestr,
ignoretz=ignoretz,
tzinfos=tzinfos))
if compatible and dtstart:
rset.rdate(dtstart)
return rset
else:
return self._parse_rfc_rrule(rrulevals[0],
dtstart=dtstart,
cache=cache,
ignoretz=ignoretz,
tzinfos=tzinfos)
def __call__(self, s, **kwargs):
return self._parse_rfc(s, **kwargs)
rrulestr = _rrulestr()
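# Illustrative usage sketch of the module-level parser above:
#
#   list(rrulestr("DTSTART:19970902T090000\n"
#                 "RRULE:FREQ=DAILY;COUNT=3"))
#
# yields the three datetimes 1997-09-02, -03 and -04 at 09:00:00.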
# vim:ts=4:sw=4:et
| ledtvavs/repository.ledtv | script.tvguide.Vader/resources/lib/dateutil/rrule.py | Python | gpl-3.0 | 64,642 | 0.000155 |
import logging
from ..analyses import AnalysesHub
from . import Analysis, CFGFast
l = logging.getLogger(name=__name__)
class Vtable:
"""
    Represents a vtable: its virtual address, its size in bytes, and the
    addresses of the functions it contains.
"""
def __init__(self, vaddr, size, func_addrs=None):
self.vaddr = vaddr
self.size = size
self.func_addrs = func_addrs if func_addrs else []
class VtableFinder(Analysis):
"""
This analysis locates Vtables in a binary based on heuristics taken from - "Reconstruction of Class Hierarchies
for Decompilation of C++ Programs"
"""
def __init__(self):
if "CFGFast" not in self.project.kb.cfgs:
# populate knowledge base
self.project.analyses[CFGFast].prep()(cross_references=True)
skip_analysis = True
# check if the sections exist
for sec in self.project.loader.main_object.sections:
if sec.name in [".data.rel.ro", ".rodata", ".data.rel.ro.local"]:
skip_analysis = False
if not skip_analysis:
self.vtables_list = self.analyze()
else:
l.warning("VtableFinder analysis is skipped")
def is_cross_referenced(self, addr):
return addr in self.project.kb.xrefs.xrefs_by_dst
def is_function(self, addr):
return addr in self.project.kb.functions
def analyze(self):
# finding candidate starting vtable addresses
# "current location is referenced from a code segment and its value is a pointer to a function,
# then it is marked as a start of vtable"
# taken from - Reconstruction of Class Hierarchies for Decompilation of C++ Programs
list_vtables = []
for sec in self.project.loader.main_object.sections:
if sec.name in [".data.rel.ro", ".rodata", ".data.rel.ro.local"]:
for offset in range(0, sec.memsize, self.project.arch.bytes):
cur_addr = sec.vaddr + offset
possible_func_addr = self.project.loader.memory.unpack_word(
cur_addr
)
# check if this address is referenced in the code segment
if self.is_cross_referenced(cur_addr):
# check if it is also a function, if so then it is possibly a vtable start
if self.is_function(possible_func_addr):
new_vtable = self.create_extract_vtable(
cur_addr, sec.memsize
)
if new_vtable is not None:
list_vtables.append(new_vtable)
return list_vtables
def create_extract_vtable(self, start_addr, sec_size):
# using the starting address extracting the vtable
# "Other elements of vtable must be unreferenced pointers to function"
# "Vtable ends with the first location that is either referenced from the program code,
# or is not a pointer to a function"
# taken from - Reconstruction of Class Hierarchies for Decompilation of C++ Programs
first_func_addr = self.project.loader.memory.unpack_word(start_addr)
cur_vtable = Vtable(start_addr, self.project.arch.bytes, [first_func_addr])
for cur_addr in range(
start_addr + self.project.arch.bytes,
start_addr + sec_size,
self.project.arch.bytes,
):
possible_func_addr = self.project.loader.memory.unpack_word(cur_addr)
if self.is_function(possible_func_addr) and not self.is_cross_referenced(
cur_addr
):
cur_vtable.func_addrs.append(possible_func_addr)
cur_vtable.size += self.project.arch.bytes
elif not self.is_function(possible_func_addr) or self.is_cross_referenced(
cur_addr
):
return cur_vtable
return None
AnalysesHub.register_default("VtableFinder", VtableFinder)
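# Illustrative usage sketch (assumes an angr Project for some C++ binary):
#
#   import angr
#   proj = angr.Project("a.out", auto_load_libs=False)
#   finder = proj.analyses.VtableFinder()
#   for vt in finder.vtables_list:
#       print(hex(vt.vaddr), vt.size, [hex(a) for a in vt.func_addrs])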
| angr/angr | angr/analyses/vtable.py | Python | bsd-2-clause | 4,142 | 0.002897 |
# ***************************************************************************
# * Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
# *
# * Licensed under the Apache License, Version 2.0 (the "License");
# * you may not use this file except in compliance with the License.
# * You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# **************************************************************************/
from cloudify import ctx
from cloudify.decorators import operation
@operation
def op(**_):
ctx.logger.info('Performing OP')
| cloudify-cosmo/cloudify-manager | tests/integration_tests/resources/dsl/plugin_tests/plugins/mock-plugin/mock_plugin/ops.py | Python | apache-2.0 | 940 | 0 |
# wikirandom.py: Functions for downloading random articles from Wikipedia
#
# Copyright (C) 2010 Matthew D. Hoffman
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, os, urllib2, re, string, time, threading, fnmatch
from random import randint
from bs4 import BeautifulSoup
class LiveparseDocGen():
def __init__(self, path):
print "initializing live parse doc gen with path " + path
self.doclist = []
for (root,dirnames,filenames) in os.walk(path):
for filename in fnmatch.filter(filenames, '*'):
#print os.path.join(root,filename)
self.doclist.append(os.path.join(root,filename))
def get_article(self, id):
docfile = self.doclist[id]
xml = open(docfile, 'r')
soup = BeautifulSoup(xml)
xml.close()
# find all the text
if soup.find("block", {"class":"full_text"}) is None:
return self.get_random_article()
fulltext = soup.find("block", {"class":"full_text"})
paras = fulltext.findAll("p")
alltxt = ' '.join([p.contents[0] for p in paras])
alltxt = alltxt.lower()
alltxt = re.sub(r'-', ' ', alltxt)
alltxt = re.sub(r'[^a-z ]', '', alltxt)
alltxt = re.sub(r' +', ' ', alltxt)
title = soup.find("title")
title = title.contents[0] if title else ""
byline = soup.find("byline")
subtitle = byline.contents[0] if byline and len(byline.contents) != 0 \
else ""
return (alltxt, title, subtitle, docfile)
def get_random_article(self):
id = randint(0, len(self.doclist) - 1)
return self.get_article(id)
def get_random_articles(self, n):
docs = []
for i in range(n):
(doc, title, subtitle, link) = self.get_random_article()
docs.append(doc)
return docs
def getDocCount(self):
return len(self.doclist)
def __iter__(self):
self.current = 0
return self
def next(self):
if self.current >= len(self.doclist):
raise StopIteration
else:
(all, title, subtitle, docfile) = self.get_article(self.current)
link = self.doclist[self.current]
self.current += 1
return (link, all, title, subtitle)
class PreparseDocGen():
def __init__(self, filename):
print "initializing preparsed doc gen with file " + filename
lines = open(filename).readlines()
self.docs = []
self.terms = set()
for line in lines:
wordids = []
wordcts = []
for token in line.split(' ')[1:]:
tokens = token.split(':')
wordids.append(int(tokens[0])-1)
wordcts.append(int(tokens[1]))
self.terms.add(int(tokens[0]))
self.docs.append((wordids, wordcts))
self.D = len(self.docs)
        # The first list, wordids, says which vocabulary tokens are present
        # in each document: wordids[i][j] gives the jth unique token present
        # in document i. (Don't count on these tokens being in any
        # particular order.)
        # The second list, wordcts, says how many times each vocabulary
        # token is present: wordcts[i][j] is the number of times that the
        # token given by wordids[i][j] appears in document i.
def get_random_articles(self, n):
wordids = []
wordcts = []
i = 0
while (i < n):
doc = self.docs[randint(0, self.D - 1)]
# omit short docs from training (to speed things up)
if len(doc[0]) < 5:
continue
wordids.append(doc[0])
wordcts.append(doc[1])
i += 1
return((wordids, wordcts))
def getDocCount(self):
return len(self.docs)
def getTermCount(self):
return len(self.terms)
def __iter__(self):
self.current = 0
return self
def next(self):
if self.current >= len(self.docs):
raise StopIteration
else:
doc = self.docs[self.current]
self.current += 1
return(([doc[0]], [doc[1]]))
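# Illustrative example of the preparsed input format: a line such as
#   "42 3:2 7:1"
# (leading token ignored, then id:count pairs with 1-based ids) becomes
# wordids == [2, 6] and wordcts == [2, 1] for that document.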
def get_random_wikipedia_article():
"""
Downloads a randomly selected Wikipedia article (via
http://en.wikipedia.org/wiki/Special:Random) and strips out (most
of) the formatting, links, etc.
This function is a bit simpler and less robust than the code that
was used for the experiments in "Online VB for LDA."
"""
failed = True
while failed:
articletitle = None
failed = False
try:
req = urllib2.Request('http://en.wikipedia.org/wiki/Special:Random',
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
while not articletitle:
line = f.readline()
result = re.search(r'title="Edit this page" href="/w/index.php\?title=(.*)\&action=edit" /\>', line)
if (result):
articletitle = result.group(1)
break
elif (len(line) < 1):
sys.exit(1)
req = urllib2.Request('http://en.wikipedia.org/w/index.php?title=Special:Export/%s&action=submit' \
% (articletitle),
None, { 'User-Agent' : 'x'})
f = urllib2.urlopen(req)
all = f.read()
except (urllib2.HTTPError, urllib2.URLError):
print 'oops. there was a failure downloading %s. retrying...' \
% articletitle
failed = True
continue
print 'downloaded %s. parsing...' % articletitle
try:
all = re.search(r'<text.*?>(.*)</text', all, flags=re.DOTALL).group(1)
all = re.sub(r'\n', ' ', all)
all = re.sub(r'\{\{.*?\}\}', r'', all)
all = re.sub(r'\[\[Category:.*', '', all)
all = re.sub(r'==\s*[Ss]ource\s*==.*', '', all)
all = re.sub(r'==\s*[Rr]eferences\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks\s*==.*', '', all)
all = re.sub(r'==\s*[Ee]xternal [Ll]inks and [Rr]eferences==\s*', '', all)
all = re.sub(r'==\s*[Ss]ee [Aa]lso\s*==.*', '', all)
all = re.sub(r'http://[^\s]*', '', all)
all = re.sub(r'\[\[Image:.*?\]\]', '', all)
all = re.sub(r'Image:.*?\|', '', all)
all = re.sub(r'\[\[.*?\|*([^\|]*?)\]\]', r'\1', all)
all = re.sub(r'\<.*?>', '', all)
except:
# Something went wrong, try again. (This is bad coding practice.)
print 'oops. there was a failure parsing %s. retrying...' \
% articletitle
failed = True
continue
return(all, articletitle)
class WikiThread(threading.Thread):
articles = list()
articlenames = list()
lock = threading.Lock()
def run(self):
(article, articlename) = get_random_wikipedia_article()
WikiThread.lock.acquire()
WikiThread.articles.append(article)
WikiThread.articlenames.append(articlename)
WikiThread.lock.release()
def get_random_wikipedia_articles(n):
"""
Downloads n articles in parallel from Wikipedia and returns lists
of their names and contents. Much faster than calling
get_random_wikipedia_article() serially.
"""
maxthreads = 8
WikiThread.articles = list()
WikiThread.articlenames = list()
wtlist = list()
for i in range(0, n, maxthreads):
print 'downloaded %d/%d articles...' % (i, n)
for j in range(i, min(i+maxthreads, n)):
wtlist.append(WikiThread())
wtlist[len(wtlist)-1].start()
for j in range(i, min(i+maxthreads, n)):
wtlist[j].join()
return (WikiThread.articles, WikiThread.articlenames)
if __name__ == '__main__':
t0 = time.time()
(articles, articlenames) = get_random_wikipedia_articles(1)
for i in range(0, len(articles)):
print articlenames[i]
t1 = time.time()
print 'took %f' % (t1 - t0)
| ajbc/lda-svi | generalrandom.py | Python | gpl-3.0 | 8,782 | 0.00353 |
from tests.api import auth_for
from tests.data import add_fixtures, clubs, users
def test_lva(db_session, client):
lva = clubs.lva(owner=users.john())
add_fixtures(db_session, lva)
res = client.get("/clubs/{id}".format(id=lva.id))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": False,
"owner": {"id": lva.owner.id, "name": lva.owner.name},
}
def test_sfn(db_session, client):
sfn = clubs.sfn()
add_fixtures(db_session, sfn)
res = client.get("/clubs/{id}".format(id=sfn.id))
assert res.status_code == 200
assert res.json == {
u"id": sfn.id,
u"name": u"Sportflug Niederberg",
u"timeCreated": "2017-01-01T12:34:56+00:00",
u"website": None,
u"isWritable": False,
u"owner": None,
}
def test_writable(db_session, client):
lva = clubs.lva()
john = users.john(club=lva)
add_fixtures(db_session, lva, john)
res = client.get("/clubs/{id}".format(id=lva.id), headers=auth_for(john))
assert res.status_code == 200
assert res.json == {
"id": lva.id,
"name": "LV Aachen",
"timeCreated": "2015-12-24T12:34:56+00:00",
"website": "http://www.lv-aachen.de",
"isWritable": True,
"owner": None,
}
def test_missing(client):
res = client.get("/clubs/10000000")
assert res.status_code == 404
def test_invalid_id(client):
res = client.get("/clubs/abc")
assert res.status_code == 404
| skylines-project/skylines | tests/api/views/clubs/read_test.py | Python | agpl-3.0 | 1,643 | 0 |
"""
Polish-specific form helpers
"""
import re
from django.newforms import ValidationError
from django.newforms.fields import Select, RegexField
from django.utils.translation import ugettext_lazy as _
class PLVoivodeshipSelect(Select):
"""
A select widget with list of Polish voivodeships (administrative provinces)
as choices.
"""
def __init__(self, attrs=None):
from pl_voivodeships import VOIVODESHIP_CHOICES
super(PLVoivodeshipSelect, self).__init__(attrs, choices=VOIVODESHIP_CHOICES)
class PLAdministrativeUnitSelect(Select):
"""
A select widget with list of Polish administrative units as choices.
"""
def __init__(self, attrs=None):
from pl_administrativeunits import ADMINISTRATIVE_UNIT_CHOICES
super(PLAdministrativeUnitSelect, self).__init__(attrs, choices=ADMINISTRATIVE_UNIT_CHOICES)
class PLNationalIdentificationNumberField(RegexField):
"""
A form field that validates as Polish Identification Number (PESEL).
Checks the following rules:
* the length consist of 11 digits
* has a valid checksum
The algorithm is documented at http://en.wikipedia.org/wiki/PESEL.
"""
default_error_messages = {
'invalid': _(u'National Identification Number consists of 11 digits.'),
'checksum': _(u'Wrong checksum for the National Identification Number.'),
}
def __init__(self, *args, **kwargs):
super(PLNationalIdentificationNumberField, self).__init__(r'^\d{11}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLNationalIdentificationNumberField, self).clean(value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3, 1)
result = 0
for i in range(len(number)):
result += int(number[i]) * multiple_table[i]
return result % 10 == 0
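    # Illustrative worked example: for the PESEL 44051401359 the weighted
    # sum is 4*1 + 4*3 + 0*7 + 5*9 + 1*1 + 4*3 + 0*7 + 1*9 + 3*1 + 5*3 + 9*1
    # == 110, and 110 % 10 == 0, so the checksum is valid.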
class PLTaxNumberField(RegexField):
"""
A form field that validates as Polish Tax Number (NIP).
Valid forms are: XXX-XXX-YY-YY or XX-XX-YYY-YYY.
Checksum algorithm based on documentation at
http://wipos.p.lodz.pl/zylla/ut/nip-rego.html
"""
default_error_messages = {
'invalid': _(u'Enter a tax number field (NIP) in the format XXX-XXX-XX-XX or XX-XX-XXX-XXX.'),
'checksum': _(u'Wrong checksum for the Tax Number (NIP).'),
}
def __init__(self, *args, **kwargs):
super(PLTaxNumberField, self).__init__(r'^\d{3}-\d{3}-\d{2}-\d{2}$|^\d{2}-\d{2}-\d{3}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLTaxNumberField, self).clean(value)
value = re.sub("[-]", "", value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table = (6, 5, 7, 2, 3, 4, 5, 6, 7)
result = 0
for i in range(len(number)-1):
result += int(number[i]) * multiple_table[i]
result %= 11
if result == int(number[-1]):
return True
else:
return False
class PLNationalBusinessRegisterField(RegexField):
"""
    A form field that validates as Polish National Official Business Register
    Number (REGON).
    Valid forms are 7- or 9-digit numbers.
More on the field: http://www.stat.gov.pl/bip/regon_ENG_HTML.htm
The checksum algorithm is documented at http://wipos.p.lodz.pl/zylla/ut/nip-rego.html
"""
default_error_messages = {
'invalid': _(u'National Business Register Number (REGON) consists of 7 or 9 digits.'),
'checksum': _(u'Wrong checksum for the National Business Register Number (REGON).'),
}
def __init__(self, *args, **kwargs):
super(PLNationalBusinessRegisterField, self).__init__(r'^\d{7,9}$',
max_length=None, min_length=None, *args, **kwargs)
def clean(self,value):
super(PLNationalBusinessRegisterField, self).clean(value)
if not self.has_valid_checksum(value):
raise ValidationError(self.error_messages['checksum'])
return u'%s' % value
def has_valid_checksum(self, number):
"""
Calculates a checksum with the provided algorithm.
"""
multiple_table_7 = (2, 3, 4, 5, 6, 7)
multiple_table_9 = (8, 9, 2, 3, 4, 5, 6, 7)
result = 0
if len(number) == 7:
multiple_table = multiple_table_7
else:
multiple_table = multiple_table_9
for i in range(len(number)-1):
result += int(number[i]) * multiple_table[i]
result %= 11
if result == 10:
result = 0
if result == int(number[-1]):
return True
else:
return False
class PLPostalCodeField(RegexField):
"""
A form field that validates as Polish postal code.
Valid code is XX-XXX where X is digit.
"""
default_error_messages = {
'invalid': _(u'Enter a postal code in the format XX-XXX.'),
}
def __init__(self, *args, **kwargs):
super(PLPostalCodeField, self).__init__(r'^\d{2}-\d{3}$',
max_length=None, min_length=None, *args, **kwargs)
| diofeher/django-nfa | django/contrib/localflavor/pl/forms.py | Python | bsd-3-clause | 5,591 | 0.004114 |
from gui_items import *
from objects.xml.xml_step import Step
class ActionPanel:
def __init__(self, display):
self.steps = []
self.display = display
        # new ACTION panel (textbox with the moves, button to execute them)
self.panel = self.display.gui_items.add_panel(450, 390, (300, 300))
cube_action_gui_items = GuiItems(display, self.display.cube_gui, self.panel)
main_sizer = cube_action_gui_items.gen_box_sizer(wx.HORIZONTAL)
axes_sizer = cube_action_gui_items.gen_box_sizer(wx.VERTICAL)
output_sizer = cube_action_gui_items.gen_box_sizer(wx.VERTICAL)
        # button that executes the entered moves
cube_action_button = cube_action_gui_items.gen_button("Run actions.", 20, 20)
cube_action_button.btn_id = 'run'
cube_action_button.Bind(wx.EVT_BUTTON, lambda event: self._button_run())
# reset textbox button
cube_reset_textbox_button = cube_action_gui_items.gen_button("Reset actions.", 30, 30)
cube_reset_textbox_button.Bind(wx.EVT_BUTTON, lambda event: self._button_reset())
        # textbox holding the moves
self.cube_action_textbox = cube_action_gui_items.gen_textbox(10, 10, (200, -1), (wx.TE_MULTILINE))
# dropdown for selecting cube row
combo_box_items = []
for size in range(self.display._storage.cube_size):
combo_box_items.append(str(size+1))
self.action_combo_box = cube_action_gui_items.gen_combobox((150, 10), (150, -1), combo_box_items)
self.action_combo_box.SetSelection(0)
# turnable checkbox(clockwise, counterclockwise)
cube_turnable_checkbox = cube_action_gui_items.gen_radiobox(20, 20, (100, 100), wx.RA_SPECIFY_ROWS,
['CounterClockwise', 'Clockwise'])
        # buttons for the turns (with event bindings)
x_button = cube_action_gui_items.gen_button("Voer X in", 0, 0)
x_button.btn_id = 'x'
y_button = cube_action_gui_items.gen_button("Voer Y in", 0, 0)
y_button.btn_id = 'y'
z_button = cube_action_gui_items.gen_button("Voer Z in", 0, 0)
z_button.btn_id = 'z'
x_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('x', self.action_combo_box.GetValue(),
cube_turnable_checkbox.GetSelection()))
y_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('y', self.action_combo_box.GetValue(),
cube_turnable_checkbox.GetSelection()))
z_button.Bind(wx.EVT_BUTTON, lambda event: self._button_x_y_z('z', self.action_combo_box.GetValue(),
cube_turnable_checkbox.GetSelection()))
# undo button
undo_button = cube_action_gui_items.gen_button("Undo last input", 0,0)
undo_button.Bind(wx.EVT_BUTTON, self.__undo)
# add elements to box_sizers
output_sizer.Add(self.cube_action_textbox, 0, wx.ALL, 5)
output_sizer.Add(cube_action_button, 0, wx.ALL, 5)
output_sizer.Add(cube_reset_textbox_button, 0, wx.ALL, 5)
output_sizer.Add(undo_button, 0, wx.ALL, 5)
axes_sizer.Add(x_button, 0, wx.ALL, 1)
axes_sizer.Add(y_button, 0, wx.ALL, 1)
axes_sizer.Add(z_button, 0, wx.ALL, 1)
axes_sizer.Add(self.action_combo_box, 0, wx.ALL, 1)
axes_sizer.Add(cube_turnable_checkbox, 0, wx.ALL, 1)
main_sizer.Add(output_sizer)
main_sizer.Add(axes_sizer)
# set sizer to panel
self.panel.SetSizer(main_sizer)
self.panel.Layout()
# hide panel
self.panel.Hide()
def __undo(self, event):
counter = 1
textbox_items = ""
splitted_inserted_text = self.cube_action_textbox.GetValue().split(';')
for current_split in splitted_inserted_text:
if counter < len(splitted_inserted_text):
textbox_items += ";" + current_split
counter += 1
# change textbox value
self.cube_action_textbox.Clear()
self.cube_action_textbox.AppendText(textbox_items[1:]) # minus first ; char
def _button_run(self):
self.read_steps()
self.display._storage.current_cube.execute_steps(self.steps)
def read_steps(self):
self.steps = []
text = str(self.cube_action_textbox.GetValue())
if not text == "":
for current_split in text.split(';'):
var_split = current_split.split(',')
self.steps.append(Step(var_split[0], int(var_split[1])-1, int(var_split[2])))
print var_split
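    # Illustrative example: the textbox value "x,1,1;y,2,-1" parses into
    # Step('x', 0, 1) and Step('y', 1, -1) -- the row is 1-based in the
    # GUI and converted to 0-based here.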
def _button_reset(self):
self.steps = []
self.cube_action_textbox.Clear()
def _reset_textbox(self):
self.cube_action_textbox.Clear()
for step in self.steps:
self.cube_action_textbox.AppendText(";" + str(step.axis) + "," + str(step.rows) + "," + str(step.direction))
def _button_x_y_z(self, axis, row, direction):
if direction == 0:
direction = -1
if len(self.cube_action_textbox.GetValue()) == 0:
self.cube_action_textbox.AppendText(str(axis) + "," + str(row) + "," + str(direction))
else:
self.cube_action_textbox.AppendText(";" + str(axis) + "," + str(row) + "," + str(direction)) | Willempie/Artificial_Intelligence_Cube | logic/handling/panel_action.py | Python | apache-2.0 | 5,455 | 0.005683 |
#-----------------------------------------------------------------------------
# Copyright (c) 2010-2012 Brian Granger, Min Ragan-Kelley
#
# This file is part of pyzmq
#
# Distributed under the terms of the New BSD License. The full license is in
# the file COPYING.BSD, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from unittest import TestCase
import zmq
from zmq.sugar import version
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
class TestVersion(TestCase):
def test_pyzmq_version(self):
vs = zmq.pyzmq_version()
vs2 = zmq.__version__
self.assertTrue(isinstance(vs, str))
if zmq.__revision__:
            self.assertEqual(vs, '@'.join([vs2, zmq.__revision__]))
else:
self.assertEqual(vs, vs2)
if version.VERSION_EXTRA:
self.assertTrue(version.VERSION_EXTRA in vs)
self.assertTrue(version.VERSION_EXTRA in vs2)
def test_pyzmq_version_info(self):
info = zmq.pyzmq_version_info()
self.assertTrue(isinstance(info, tuple))
for n in info[:3]:
self.assertTrue(isinstance(n, int))
if version.VERSION_EXTRA:
self.assertEqual(len(info), 4)
self.assertEqual(info[-1], float('inf'))
else:
self.assertEqual(len(info), 3)
def test_zmq_version_info(self):
info = zmq.zmq_version_info()
self.assertTrue(isinstance(info, tuple))
for n in info[:3]:
self.assertTrue(isinstance(n, int))
def test_zmq_version(self):
v = zmq.zmq_version()
self.assertTrue(isinstance(v, str))
| ellisonbg/pyzmq | zmq/tests/test_version.py | Python | lgpl-3.0 | 1,970 | 0.003553 |
__author__ = 'sysferland'
import argparse, subprocess, os
parser = argparse.ArgumentParser()
parser.add_argument('-feed', help='feed name')
parser.add_argument('-ffserver', help='ffserver IP and PORT')
parser.add_argument('-source', help='video source path if DVD Raw the path to the VIDEO_TS folder')
parser.add_argument('-seek', help='time to seek to in for the feed')
parser.add_argument('-binpath', help='ffmpeg bin path')
args = parser.parse_args()
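# Illustrative invocation (hypothetical paths and hosts):
#   python StreamThread.py -feed feed1.ffm -ffserver 127.0.0.1:8090 \
#       -source /media/dvd/VIDEO_TS -seek 00:10:00 -binpath /usr/bin/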
videofile = os.path.normpath(args.source)
#dt_to = datetime.time(00,20,00)
#dt_delta = datetime.time(00,00,30)
#seek_to = datetime.timedelta(hours=dt_to.hour,minutes=dt_to.minute,seconds=dt_to.second)
#seek_delta = datetime.timedelta(hours=dt_delta.hour,minutes=dt_delta.minute,seconds=dt_delta.second)
#seek_to_fast = seek_to - seek_delta
seek_delta = "00:00:00"
seek_to_fast = "00:00:00"
other_options = "-ss " + str(seek_to_fast)
options = "-ss "+ str(seek_delta) # +" -trellis 1 -lmax 42000 "
ffm_output = " http://"+args.ffserver+"/"+args.feed
# Escape shell metacharacters in the source path once; re-escaping the whole
# command afterwards would double-escape the '&' already handled here.
command = args.binpath + "ffmpeg -threads 2 " + other_options + " -i " + videofile.replace("'", "\\'").replace(" ", "\\ ").replace("-", "\\-").replace("&", "\\&") + " " + options + ffm_output
print command
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#print process.stdout
out, err = process.communicate() | RIEI/tongue | TongueD/StreamThread.py | Python | gpl-2.0 | 1,400 | 0.013571 |
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Sergio Teruel
# (c) 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp.tests.common import TransactionCase
class TestSAuthSupplier(TransactionCase):
def setUp(self):
super(TestSAuthSupplier, self).setUp()
ir_config_parameter = self.env['ir.config_parameter']
ir_config_parameter.set_param('auth_signup.allow_uninvited', 'True')
def test_user_signup(self):
values = {
'login': '[email protected]',
'name': 'test',
'password': '1234',
'account_type': 'supplier'
}
user_obj = self.env['res.users']
user = user_obj.browse(user_obj._signup_create_user(values))
self.assertTrue(user.partner_id.supplier)
| MackZxh/OCA-Choice | server-tools/auth_supplier/tests/test_auth_supplier.py | Python | lgpl-3.0 | 852 | 0 |
from django.contrib import admin
import models
from pombola.slug_helpers.admin import StricterSlugFieldMixin
class QuizAdmin(StricterSlugFieldMixin, admin.ModelAdmin):
prepopulated_fields = {"slug": ["name"]}
class StatementAdmin(admin.ModelAdmin):
pass
class PartyAdmin(admin.ModelAdmin):
pass
class StanceAdmin(admin.ModelAdmin):
pass
class SubmissionAdmin(admin.ModelAdmin):
pass
class AnswerAdmin(admin.ModelAdmin):
pass
admin.site.register(models.Quiz, QuizAdmin)
admin.site.register(models.Statement, StatementAdmin)
admin.site.register(models.Party, PartyAdmin)
admin.site.register(models.Stance, StanceAdmin)
admin.site.register(models.Submission, SubmissionAdmin)
admin.site.register(models.Answer, AnswerAdmin)
| hzj123/56th | pombola/votematch/admin.py | Python | agpl-3.0 | 754 | 0.01061 |
# coding=utf-8
# generate_completion_cache.py - generate cache for dnf bash completion
# Copyright © 2013 Elad Alfassa <[email protected]>
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from dnfpluginscore import logger
import dnf
import os.path
class BashCompletionCache(dnf.Plugin):
name = 'generate_completion_cache'
def __init__(self, base, cli):
self.base = base
self.available_cache_file = '/var/cache/dnf/available.cache'
self.installed_cache_file = '/var/cache/dnf/installed.cache'
def _out(self, msg):
logger.debug('Completion plugin: %s', msg)
def sack(self):
''' Generate cache of available packages '''
# We generate this cache only if the repos were just freshed or if the
# cache file doesn't exist
fresh = False
for repo in self.base.repos.iter_enabled():
if repo.metadata is not None and repo.metadata.fresh:
# One fresh repo is enough to cause a regen of the cache
fresh = True
break
if not os.path.exists(self.available_cache_file) or fresh:
try:
with open(self.available_cache_file, 'w') as cache_file:
self._out('Generating completion cache...')
available_packages = self.base.sack.query().available()
for package in available_packages:
cache_file.write(package.name + '\n')
except Exception as e:
self._out('Can\'t write completion cache: %s' % e)
def transaction(self):
''' Generate cache of installed packages '''
try:
with open(self.installed_cache_file, 'w') as cache_file:
installed_packages = self.base.sack.query().installed()
self._out('Generating completion cache...')
for package in installed_packages:
cache_file.write(package.name + '\n')
except Exception as e:
self._out('Can\'t write completion cache: %s' % e)
| rholy/dnf-plugins-core | plugins/generate_completion_cache.py | Python | gpl-2.0 | 2,729 | 0 |
# Name: mapper_opendap_sentinel1.py
# Purpose: Nansat mapping for ESA Sentinel-1 data from the Norwegian ground segment
# Author: Morten W. Hansen
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
import os
from datetime import datetime
import json
import warnings
import numpy as np
from netCDF4 import Dataset
from nansat.utils import gdal
# Assumption: NansatReadError (raised below) lives in nansat.exceptions.
from nansat.exceptions import NansatReadError
try:
    import scipy
except ImportError:
IMPORT_SCIPY = False
else:
IMPORT_SCIPY = True
import pythesint as pti
from nansat.mappers.sentinel1 import Sentinel1
from nansat.mappers.opendap import Opendap
from nansat.vrt import VRT
from nansat.nsr import NSR
from nansat.utils import initial_bearing
class Mapper(Opendap, Sentinel1):
baseURLs = [
'http://nbstds.met.no/thredds/dodsC/NBS/S1A',
'http://nbstds.met.no/thredds/dodsC/NBS/S1B',
]
timeVarName = 'time'
xName = 'x'
yName = 'y'
timeCalendarStart = '1981-01-01'
srcDSProjection = NSR().wkt
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
ds=None, bands=None, cachedir=None, *args, **kwargs):
self.test_mapper(filename)
if not IMPORT_SCIPY:
raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')
timestamp = date if date else self.get_date(filename)
self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
Sentinel1.__init__(self, filename)
self.add_calibrated_nrcs(filename)
self.add_nrcs_VV_from_HH(filename)
def add_calibrated_nrcs(self, filename):
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
polarizations = [self.ds.polarisation[i:i+2] for i in range(0,len(self.ds.polarisation),2)]
for pol in polarizations:
dims = list(self.ds.variables['dn_%s' %pol].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_%s' %pol, dims)['src'],
self.get_metaitem(filename, 'sigmaNought_%s' %pol, dims)['src']
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Calibration',
'polarization': pol,
'suffix': pol,
}
self.create_band(src, dst)
self.dataset.FlushCache()
def add_nrcs_VV_from_HH(self, filename):
if not 'Amplitude_HH' in self.ds.variables.keys():
return
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
dims = list(self.ds.variables['dn_HH'].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_HH', dims)['src'],
self.get_metaitem(filename, 'sigmaNought_HH', dims)['src'],
{'SourceFilename': self.band_vrts['inciVRT'].filename, 'SourceBand': 1}
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
'polarization': 'VV',
'suffix': 'VV'}
self.create_band(src, dst)
self.dataset.FlushCache()
@staticmethod
def get_date(filename):
"""Extract date and time parameters from filename and return
it as a formatted (isoformat) string
Parameters
----------
filename: str
nn
Returns
-------
str, YYYY-mm-ddThh:MMZ
"""
_, filename = os.path.split(filename)
t = datetime.strptime(filename.split('_')[4], '%Y%m%dT%H%M%S')
return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
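        # Illustrative example: a basename such as
        #   S1A_EW_GRDM_1SDH_20170227T065537_20170227T065637_015432_019574_F356.nc
        # yields '2017-02-27T06:55:37Z' (the fifth '_'-separated field).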
def convert_dstime_datetimes(self, ds_time):
"""Convert time variable to np.datetime64"""
ds_datetimes = np.array(
[(np.datetime64(self.timeCalendarStart).astype('M8[s]')
+ np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
return ds_datetimes
def get_geotransform(self):
""" Return fake and temporary geotransform. This will be replaced by gcps in
Sentinel1.__init__
"""
xx = self.ds.variables['lon'][0:100:50, 0].data
yy = self.ds.variables['lat'][0, 0:100:50].data
return xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
| nansencenter/nansat | nansat/mappers/mapper_opendap_sentinel1.py | Python | gpl-3.0 | 4,824 | 0.004561 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""For internal use only; no backwards-compatibility guarantees."""
import abc
import inspect
from google.protobuf import wrappers_pb2
from apache_beam.internal import pickler
from apache_beam.utils import proto_utils
PICKLED_WINDOW_FN = "beam:window_fn:pickled_python:v0.1"
GLOBAL_WINDOWS_FN = "beam:window_fn:global_windows:v0.1"
FIXED_WINDOWS_FN = "beam:window_fn:fixed_windows:v0.1"
SLIDING_WINDOWS_FN = "beam:window_fn:sliding_windows:v0.1"
SESSION_WINDOWS_FN = "beam:window_fn:session_windows:v0.1"
PICKLED_CODER = "beam:coder:pickled_python:v0.1"
PICKLED_TRANSFORM = "beam:ptransform:pickled_python:v0.1"
FLATTEN_TRANSFORM = "beam:ptransform:flatten:v0.1"
WINDOW_INTO_TRANSFORM = "beam:ptransform:window_into:v0.1"
class RunnerApiFn(object):
"""Abstract base class that provides urn registration utilities.
A class that inherits from this class will get a registration-based
from_runner_api and to_runner_api method that convert to and from
beam_runner_api_pb2.SdkFunctionSpec.
Additionally, register_pickle_urn can be called from the body of a class
to register serialization via pickling.
"""
__metaclass__ = abc.ABCMeta
_known_urns = {}
@abc.abstractmethod
def to_runner_api_parameter(self, unused_context):
"""Returns the urn and payload for this Fn.
The returned urn(s) should be registered with `register_urn`.
"""
pass
@classmethod
def register_urn(cls, urn, parameter_type, fn=None):
"""Registeres a urn with a constructor.
For example, if 'beam:fn:foo' had paramter type FooPayload, one could
write `RunnerApiFn.register_urn('bean:fn:foo', FooPayload, foo_from_proto)`
where foo_from_proto took as arguments a FooPayload and a PipelineContext.
This function can also be used as a decorator rather than passing the
callable in as the final parameter.
A corresponding to_runner_api_parameter method would be expected that
returns the tuple ('beam:fn:foo', FooPayload)
"""
def register(fn):
cls._known_urns[urn] = parameter_type, fn
return staticmethod(fn)
if fn:
# Used as a statement.
register(fn)
else:
# Used as a decorator.
return register
@classmethod
def register_pickle_urn(cls, pickle_urn):
"""Registers and implements the given urn via pickling.
"""
inspect.currentframe().f_back.f_locals['to_runner_api_parameter'] = (
lambda self, context: (
pickle_urn, wrappers_pb2.BytesValue(value=pickler.dumps(self))))
cls.register_urn(
pickle_urn,
wrappers_pb2.BytesValue,
lambda proto, unused_context: pickler.loads(proto.value))
def to_runner_api(self, context):
"""Returns an SdkFunctionSpec encoding this Fn.
Prefer overriding self.to_runner_api_parameter.
"""
from apache_beam.runners.api import beam_runner_api_pb2
urn, typed_param = self.to_runner_api_parameter(context)
return beam_runner_api_pb2.SdkFunctionSpec(
spec=beam_runner_api_pb2.FunctionSpec(
urn=urn,
parameter=proto_utils.pack_Any(typed_param)))
@classmethod
def from_runner_api(cls, fn_proto, context):
"""Converts from an SdkFunctionSpec to a Fn object.
Prefer registering a urn with its parameter type and constructor.
"""
parameter_type, constructor = cls._known_urns[fn_proto.spec.urn]
return constructor(
proto_utils.unpack_Any(fn_proto.spec.parameter, parameter_type),
context)
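# Illustrative sketch of registering a custom Fn (names are hypothetical):
#
#   class FooFn(RunnerApiFn):
#       def to_runner_api_parameter(self, context):
#           return 'beam:fn:foo', wrappers_pb2.BytesValue(value=b'...')
#
#   FooFn.register_urn('beam:fn:foo', wrappers_pb2.BytesValue,
#                      lambda proto, context: FooFn())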
| dhalperi/beam | sdks/python/apache_beam/utils/urns.py | Python | apache-2.0 | 4,265 | 0.00422 |
#!/usr/bin/python
import sys
""" My input is 2234,2234,765,2,3,44,44,55,33,33,2,33,33,33
my o/p
2234:2,765,2,3,44:2,55,33:2,2,33:3"""
my_input = sys.argv[1]
#my_input = "1,7,2234,2234,765,2,3,44,44,55,33,33,2,33,33,33,33,1"
my_list = my_input.split(",")
my_str = ""
#print my_list
init = my_list[0]
count = 0
final_list = []
def fmt(value, count):
    # "value:count" for runs, plain "value" for singletons. This replaces
    # the old my_str.replace(":1", "") trick, which also mangled counts
    # that merely start with a 1 (e.g. "33:12" became "332").
    return "%s:%s" % (value, count) if count > 1 else str(value)
for i in my_list:
    if i == init:
        count += 1
    else:
        #print init, count
        my_str = my_str + "," + fmt(init, count)
        count = 1
        init = i
#print init, count
my_str = my_str + "," + fmt(init, count)
print my_str[1:]
#final_list = zip(my_numbers,my_count)
#print final_list
| hiteshagrawal/python | info/bkcom/problem8.py | Python | gpl-2.0 | 635 | 0.034646 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from weblate.trans.models.changes import Change
from weblate.trans.forms import ReportsForm
from weblate.trans.views.helper import get_subproject
from weblate.trans.permissions import can_view_reports
from django.http import HttpResponse, JsonResponse
from django.views.decorators.http import require_POST
from django.shortcuts import redirect
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
def generate_credits(component, start_date, end_date):
"""Generates credits data for given component."""
result = []
for translation in component.translation_set.all():
authors = Change.objects.content().filter(
translation=translation,
timestamp__range=(start_date, end_date),
).values_list(
'author__email', 'author__first_name'
)
if not authors:
continue
result.append({translation.language.name: sorted(set(authors))})
return result
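# Shape of the value returned by generate_credits (illustrative values):
#   [{'Czech': [('[email protected]', 'Jane Doe')]},
#    {'German': [('[email protected]', 'Max Mustermann')]}]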
@login_required
@require_POST
def get_credits(request, project, subproject):
"""View for credits"""
obj = get_subproject(request, project, subproject)
if not can_view_reports(request.user, obj.project):
raise PermissionDenied()
form = ReportsForm(request.POST)
if not form.is_valid():
return redirect(obj)
data = generate_credits(
obj,
form.cleaned_data['start_date'],
form.cleaned_data['end_date'],
)
if form.cleaned_data['style'] == 'json':
return JsonResponse(data=data, safe=False)
if form.cleaned_data['style'] == 'html':
start = '<table>'
row_start = '<tr>'
language_format = u'<th>{0}</th>'
translator_start = '<td><ul>'
translator_format = u'<li><a href="mailto:{0}">{1}</a></li>'
translator_end = '</ul></td>'
row_end = '</tr>'
mime = 'text/html'
end = '</table>'
else:
start = ''
row_start = ''
language_format = u'* {0}\n'
translator_start = ''
translator_format = u' * {1} <{0}>'
translator_end = ''
row_end = ''
mime = 'text/plain'
end = ''
result = []
result.append(start)
for language in data:
name, translators = language.items()[0]
result.append(row_start)
result.append(language_format.format(name))
result.append(
u'{0}{1}{2}'.format(
translator_start,
'\n'.join(
[translator_format.format(*t) for t in translators]
),
translator_end,
)
)
result.append(row_end)
result.append(end)
return HttpResponse(
'\n'.join(result),
content_type='{0}; charset=utf-8'.format(mime),
)
def generate_counts(component, start_date, end_date):
"""Generates credits data for given component."""
result = {}
for translation in component.translation_set.all():
authors = Change.objects.content().filter(
translation=translation,
timestamp__range=(start_date, end_date),
).values_list(
'author__email', 'author__first_name', 'unit__num_words',
)
for email, name, words in authors:
if words is None:
continue
if email not in result:
result[email] = {
'name': name,
'email': email,
'words': words,
'count': 1,
}
else:
result[email]['words'] += words
result[email]['count'] += 1
return result.values()
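# Shape of the value returned by generate_counts (illustrative values):
#   [{'name': 'Jane', 'email': '[email protected]', 'words': 120, 'count': 5}]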
@login_required
@require_POST
def get_counts(request, project, subproject):
"""View for work counts"""
obj = get_subproject(request, project, subproject)
if not can_view_reports(request.user, obj.project):
raise PermissionDenied()
form = ReportsForm(request.POST)
if not form.is_valid():
return redirect(obj)
data = generate_counts(
obj,
form.cleaned_data['start_date'],
form.cleaned_data['end_date'],
)
if form.cleaned_data['style'] == 'json':
return JsonResponse(data=data, safe=False)
if form.cleaned_data['style'] == 'html':
start = (
'<table>\n<tr><th>Name</th><th>Email</th>'
'<th>Words</th><th>Count</th></tr>'
)
row_start = '<tr>'
cell_name = cell_email = cell_words = cell_count = u'<td>{0}</td>\n'
row_end = '</tr>'
mime = 'text/html'
end = '</table>'
else:
heading = ' '.join([
'=' * 40,
'=' * 40,
'=' * 10,
'=' * 10,
])
start = '{0}\n{1:40} {2:40} {3:10} {4:10}\n{0}'.format(
heading,
'Name',
'Email',
'Words',
'Count'
)
row_start = ''
cell_name = cell_email = u'{0:40} '
cell_words = cell_count = u'{0:10} '
row_end = ''
mime = 'text/plain'
end = heading
result = []
result.append(start)
for item in data:
if row_start:
result.append(row_start)
result.append(
u'{0}{1}{2}{3}'.format(
cell_name.format(item['name']),
cell_email.format(item['email']),
cell_words.format(item['words']),
cell_count.format(item['count']),
)
)
if row_end:
result.append(row_end)
result.append(end)
return HttpResponse(
'\n'.join(result),
content_type='{0}; charset=utf-8'.format(mime),
)
| miyataken999/weblate | weblate/trans/views/reports.py | Python | gpl-3.0 | 6,538 | 0 |
# This module is DEPRECATED!
#
# You should no longer be pointing your mod_python configuration
# at "django.core.handler".
#
# Use "django.core.handlers.modpython" instead.
from django.core.handlers.modpython import ModPythonHandler
def handler(req):
return ModPythonHandler()(req)
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-0.96/django/core/handler.py | Python | bsd-3-clause | 289 | 0.00346 |
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/engine.py
import asyncio
import aiomysql
from .connection import SAConnection
from .exc import InvalidRequestError, ArgumentError
from ..utils import _PoolContextManager, _PoolAcquireContextManager
from ..cursors import (
Cursor, DeserializationCursor, DictCursor, SSCursor, SSDictCursor)
try:
from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql
from sqlalchemy.dialects.mysql.mysqldb import MySQLCompiler_mysqldb
except ImportError: # pragma: no cover
raise ImportError('aiomysql.sa requires sqlalchemy')
class MySQLCompiler_pymysql(MySQLCompiler_mysqldb):
def construct_params(self, params=None, _group_number=None, _check=True):
pd = super().construct_params(params, _group_number, _check)
for column in self.prefetch:
pd[column.key] = self._exec_default(column.default)
return pd
def _exec_default(self, default):
if default.is_callable:
return default.arg(self.dialect)
else:
return default.arg
_dialect = MySQLDialect_pymysql(paramstyle='pyformat')
_dialect.statement_compiler = MySQLCompiler_pymysql
_dialect.default_paramstyle = 'pyformat'
def create_engine(minsize=1, maxsize=10, loop=None,
dialect=_dialect, pool_recycle=-1, compiled_cache=None,
**kwargs):
"""A coroutine for Engine creation.
    Returns an Engine instance with an embedded connection pool.
    The pool holds *minsize* open connections to the MySQL server.
"""
deprecated_cursor_classes = [
DeserializationCursor, DictCursor, SSCursor, SSDictCursor,
]
cursorclass = kwargs.get('cursorclass', Cursor)
if not issubclass(cursorclass, Cursor) or any(
issubclass(cursorclass, cursor_class)
for cursor_class in deprecated_cursor_classes
):
raise ArgumentError('SQLAlchemy engine does not support '
'this cursor class')
coro = _create_engine(minsize=minsize, maxsize=maxsize, loop=loop,
dialect=dialect, pool_recycle=pool_recycle,
compiled_cache=compiled_cache, **kwargs)
return _EngineContextManager(coro)
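# Illustrative usage sketch (connection parameters are placeholders):
#   async def go():
#       engine = await create_engine(user='root', db='test',
#                                    host='127.0.0.1', password='secret')
#       async with engine.acquire() as conn:
#           await conn.execute("SELECT 42")
#       engine.close()
#       await engine.wait_closed()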
async def _create_engine(minsize=1, maxsize=10, loop=None,
dialect=_dialect, pool_recycle=-1,
compiled_cache=None, **kwargs):
if loop is None:
loop = asyncio.get_event_loop()
pool = await aiomysql.create_pool(minsize=minsize, maxsize=maxsize,
loop=loop,
pool_recycle=pool_recycle, **kwargs)
conn = await pool.acquire()
try:
return Engine(dialect, pool, compiled_cache=compiled_cache, **kwargs)
finally:
pool.release(conn)
class Engine:
"""Connects a aiomysql.Pool and
sqlalchemy.engine.interfaces.Dialect together to provide a
source of database connectivity and behavior.
An Engine object is instantiated publicly using the
create_engine coroutine.
"""
def __init__(self, dialect, pool, compiled_cache=None, **kwargs):
self._dialect = dialect
self._pool = pool
self._compiled_cache = compiled_cache
self._conn_kw = kwargs
@property
def dialect(self):
"""An dialect for engine."""
return self._dialect
@property
def name(self):
"""A name of the dialect."""
return self._dialect.name
@property
def driver(self):
"""A driver of the dialect."""
return self._dialect.driver
@property
def minsize(self):
return self._pool.minsize
@property
def maxsize(self):
return self._pool.maxsize
@property
def size(self):
return self._pool.size
@property
def freesize(self):
return self._pool.freesize
def close(self):
"""Close engine.
        Mark all engine connections to be closed when they are returned to
        the pool. A closed engine does not allow acquiring new connections.
"""
self._pool.close()
def terminate(self):
"""Terminate engine.
        Terminate the engine pool, instantly closing all acquired
        connections as well.
"""
self._pool.terminate()
async def wait_closed(self):
"""Wait for closing all engine's connections."""
await self._pool.wait_closed()
def acquire(self):
"""Get a connection from pool."""
coro = self._acquire()
return _EngineAcquireContextManager(coro, self)
async def _acquire(self):
raw = await self._pool.acquire()
conn = SAConnection(raw, self, compiled_cache=self._compiled_cache)
return conn
def release(self, conn):
"""Revert back connection to pool."""
if conn.in_transaction:
raise InvalidRequestError("Cannot release a connection with "
"not finished transaction")
raw = conn.connection
return self._pool.release(raw)
def __enter__(self):
raise RuntimeError(
'"yield from" should be used as context manager expression')
def __exit__(self, *args):
# This must exist because __enter__ exists, even though that
# always raises; that's how the with-statement works.
pass # pragma: nocover
def __iter__(self):
# This is not a coroutine. It is meant to enable the idiom:
#
# with (yield from engine) as conn:
# <block>
#
# as an alternative to:
#
# conn = yield from engine.acquire()
# try:
# <block>
# finally:
# engine.release(conn)
conn = yield from self.acquire()
return _ConnectionContextManager(self, conn)
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
self.close()
await self.wait_closed()
_EngineContextManager = _PoolContextManager
_EngineAcquireContextManager = _PoolAcquireContextManager
class _ConnectionContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
connection around a block:
with (yield from engine) as conn:
cur = yield from conn.cursor()
while failing loudly when accidentally using:
with engine:
<block>
"""
__slots__ = ('_engine', '_conn')
def __init__(self, engine, conn):
self._engine = engine
self._conn = conn
def __enter__(self):
assert self._conn is not None
return self._conn
def __exit__(self, *args):
try:
self._engine.release(self._conn)
finally:
self._engine = None
self._conn = None
| aio-libs/aiomysql | aiomysql/sa/engine.py | Python | mit | 6,916 | 0 |
#!/usr/bin/python3
# Copyright 2018 Francisco Pina Martins <[email protected]>
# This file is part of geste2lfmm.
# geste2lfmm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# geste2lfmm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with geste2lfmm. If not, see <http://www.gnu.org/licenses/>.
# Usage: python3 geste2lfmm.py file.geste file.lfmm
from collections import OrderedDict
def parse_geste(infile_name):
"""
    Parses a GESTE file and returns an OrderedDict with:
    {"Population_name": [Freq_ref_allele_on_SNP_1, Freq_ref_allele_on_SNP_2, ...]}
"""
infile = open(infile_name, "r")
pop_freqs = OrderedDict()
pop_starter = "[pop]="
popname = ""
for line in infile:
# Neat trick to ignore data that is not SNP info
# This code section should be very performant since it replaces most
# if - else tests with try -> except statements
line = line.split()
try:
int(line[0])
except ValueError: # In case it's a new section
if line[0].startswith(pop_starter):
popname = "Pop %s" % line[0].strip().replace(pop_starter, "")
pop_freqs[popname] = []
continue
except IndexError: # In case it's an empty line
continue
try:
ref_frequency = round(int(line[3]) / int(line[1]), 3)
except ZeroDivisionError:
ref_frequency = 9
pop_freqs[popname].append(ref_frequency)
infile.close()
return pop_freqs
def write_lfmm(pop_freqs, lfmm_filename):
"""
    Write an LFMM input file based on the OrderedDict extracted from the GESTE
file.
"""
outfile = open(lfmm_filename, 'w')
for name, freq in pop_freqs.items():
outfile.write(name + "\t")
outfile.write("\t".join(map(str, freq)) + "\n")
outfile.close()
if __name__ == "__main__":
from sys import argv
POP_FREQS = parse_geste(argv[1])
write_lfmm(POP_FREQS, argv[2])
| StuntsPT/pyRona | helper_scripts/geste2lfmm.py | Python | gpl-3.0 | 2,430 | 0 |
# -*- coding: utf-8 -*-
"""Utility functions.
"""
from collections import OrderedDict
from .bsd_checksum import bsd_checksum # make name available from this module
def n_(s, replacement='_'):
"""Make binary fields more readable.
"""
if isinstance(s, (str, unicode, bytearray)):
return s.replace('\0', replacement)
return s
def split_string(s, *ndxs):
"""String sub-class with a split() method that splits a given indexes.
Usage:
>>> print split_string('D2008022002', 1, 5, 7, 9)
['D', '2008', '02', '20', '02']
"""
if len(ndxs) == 0:
return [s]
if len(ndxs) == 1:
i = ndxs[0]
return [s[:i], s[i:]]
res = []
b = 0
while ndxs:
a, b, ndxs = b, ndxs[0], ndxs[1:]
res.append(s[a:b])
res.append(s[b:])
return res
def split_fields(s, sizes):
"""Split a string into fields based on field `sizes`.
"""
slen = len(s)
if None in sizes:
nonesize = slen - sum(v for v in sizes if v is not None)
sizes = [v or nonesize for v in sizes]
ndxs = [sizes[0]]
cur = 1
while cur < len(sizes) - 1:
ndxs.append(ndxs[-1] + sizes[cur])
cur += 1
return split_string(s, *ndxs)
class pset(OrderedDict):
"""A property set is an OrderedDict with prettier string display
(useful when working with record lengths that are wider than your
terminal).
"""
def __repr__(self):
return '{%s}' % ', '.join('%s: %r' % (str(k), str(v))
for k,v in self.items())
def __str__(self):
return "{\n%s\n}" % ',\n'.join(' %s: %r' % (str(k), str(v))
for k,v in self.items())
def pad(data, size, padchar=' '):
"""Pad the `data` to exactly length = `size`.
"""
if len(data) > size:
raise ValueError("Data is longer than size, cannot pad.")
if len(data) == size:
return data
return data + padchar * (size - len(data))
| thebjorn/fixedrec | fixedrec/utils.py | Python | mit | 2,032 | 0.001476 |
import inspect
import time
from collections import defaultdict
_method_time_logs = defaultdict(list)
def log_method_begin():
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
caller_name = "{}: {}".format(calframe[1].filename.split('/')[-1], calframe[1].function)
_method_time_logs[caller_name].append(time.time())
print("{}: begin".format(caller_name))
def log_method_end():
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
caller_name = "{}: {}".format(calframe[1].filename.split('/')[-1], calframe[1].function)
if caller_name in _method_time_logs:
logs = _method_time_logs[caller_name]
if len(logs) > 0:
print("{}: end ({:.3}s)".format(caller_name, time.time() - logs[-1]))
logs.pop()
else:
print("{}: end".format(caller_name))
| reo7sp/vk-text-likeness | vk_text_likeness/logs.py | Python | apache-2.0 | 889 | 0.003375 |
"""Classes to handle API queries/searches"""
import requests
from ticketpy.model import Venue, Event, Attraction, Classification
class BaseQuery:
"""Base query/parent class for specific serach types."""
#: Maps parameter names to parameters expected by the API
#: (ex: *market_id* maps to *marketId*)
attr_map = {
'start_date_time': 'startDateTime',
'end_date_time': 'endDateTime',
'onsale_start_date_time': 'onsaleStartDateTime',
'onsale_end_date_time': 'onsaleEndDateTime',
'country_code': 'countryCode',
'state_code': 'stateCode',
'venue_id': 'venueId',
'attraction_id': 'attractionId',
'segment_id': 'segmentId',
'segment_name': 'segmentName',
'classification_name': 'classificationName',
'classification_id': 'classificationId',
'market_id': 'marketId',
'promoter_id': 'promoterId',
'dma_id': 'dmaId',
'include_tba': 'includeTBA',
'include_tbd': 'includeTBD',
'client_visibility': 'clientVisibility',
'include_test': 'includeTest',
'keyword': 'keyword',
'id': 'id',
'sort': 'sort',
'page': 'page',
'size': 'size',
'locale': 'locale',
'latlong': 'latlong',
'radius': 'radius'
}
def __init__(self, api_client, method, model):
"""
:param api_client: Instance of ``ticketpy.client.ApiClient``
:param method: API method (ex: *events*, *venues*...)
:param model: Model from ``ticketpy.model``. Either
``Event``, ``Venue``, ``Attraction`` or ``Classification``
"""
self.api_client = api_client
self.method = method
self.model = model
def __get(self, **kwargs):
"""Sends final request to ``ApiClient``"""
response = self.api_client.search(self.method, **kwargs)
return response
def _get(self, keyword=None, entity_id=None, sort=None, include_test=None,
page=None, size=None, locale=None, **kwargs):
"""Basic API search request, with only the parameters common to all
search functions. Specific searches pass theirs through **kwargs.
:param keyword: Keyword to search on
:param entity_id: ID of the object type (such as an event ID...)
:param sort: Sort method
:param include_test: ['yes', 'no', 'only'] to include test objects in
results. Default: *no*
:param page: Page to return (default: 0)
:param size: Page size (default: 20)
:param locale: Locale (default: *en*)
:param kwargs: Additional search parameters
:return:
"""
# Combine universal parameters and supplied kwargs into single dict,
# then map our parameter names to the ones expected by the API and
# make the final request
search_args = dict(kwargs)
search_args.update({
'keyword': keyword,
'id': entity_id,
'sort': sort,
'include_test': include_test,
'page': page,
'size': size,
'locale': locale
})
params = self._search_params(**search_args)
return self.__get(**params)
def by_id(self, entity_id):
"""Get a specific object by its ID"""
get_tmpl = "{}/{}/{}"
get_url = get_tmpl.format(self.api_client.url, self.method, entity_id)
r = requests.get(get_url, params=self.api_client.api_key)
r_json = self.api_client._handle_response(r)
return self.model.from_json(r_json)
def _search_params(self, **kwargs):
"""Returns API-friendly search parameters from kwargs
        Maps parameter names via ``self.attr_map`` and removes
        parameters that are ``None``
:param kwargs: Keyword arguments
:return: API-friendly parameters
"""
        # Update search parameters with kwargs
        kw_map = {}
        for k, v in kwargs.items():
            if k in self.attr_map:
                # Map our parameter name to the one the API expects
                kw_map[self.attr_map[k]] = v
            else:
                # Already API-friendly (ex: stateCode='GA'); pass through
                kw_map[k] = v
return {k: v for (k, v) in kw_map.items() if v is not None}
class AttractionQuery(BaseQuery):
"""Query class for Attractions"""
def __init__(self, api_client):
self.api_client = api_client
super().__init__(api_client, 'attractions', Attraction)
def find(self, sort=None, keyword=None, attraction_id=None,
source=None, include_test=None, page=None, size=None,
locale=None, **kwargs):
"""
:param sort: Response sort type (API default: *name,asc*)
:param keyword:
:param attraction_id:
:param source:
:param include_test: Include test attractions (['yes', 'no', 'only'])
:param page:
:param size:
:param locale: API default: *en*
:param kwargs:
:return:
"""
return self._get(keyword, attraction_id, sort, include_test,
page, size, locale, source=source, **kwargs)
class ClassificationQuery(BaseQuery):
"""Classification search/query class"""
def __init__(self, api_client):
super().__init__(api_client, 'classifications', Classification)
def find(self, sort=None, keyword=None, classification_id=None,
source=None, include_test=None, page=None, size=None,
locale=None, **kwargs):
"""Search classifications
:param sort: Response sort type (API default: *name,asc*)
:param keyword:
:param classification_id:
:param source:
:param include_test: Include test classifications
(['yes', 'no', 'only'])
:param page:
:param size:
:param locale: API default: *en*
:param kwargs:
:return:
"""
return self._get(keyword, classification_id, sort, include_test,
page, size, locale, source=source, **kwargs)
def segment_by_id(self, segment_id):
"""Return a ``Segment`` matching this ID"""
return self.by_id(segment_id).segment
def genre_by_id(self, genre_id):
"""Return a ``Genre`` matching this ID"""
        found = None
        resp = self.by_id(genre_id)
        if resp.segment:
            for genre in resp.segment.genres:
                if genre.id == genre_id:
                    found = genre
                    break
        return found
def subgenre_by_id(self, subgenre_id):
"""Return a ``SubGenre`` matching this ID"""
subgenre = None
segment = self.by_id(subgenre_id).segment
if segment:
subgenres = [
subg for genre in segment.genres
for subg in genre.subgenres
]
for subg in subgenres:
if subg.id == subgenre_id:
subgenre = subg
return subgenre
class EventQuery(BaseQuery):
"""Abstraction to search API for events"""
def __init__(self, api_client):
super().__init__(api_client, 'events', Event)
def find(self, sort='date,asc', latlong=None, radius=None, unit=None,
start_date_time=None, end_date_time=None,
onsale_start_date_time=None, onsale_end_date_time=None,
country_code=None, state_code=None, venue_id=None,
attraction_id=None, segment_id=None, segment_name=None,
classification_name=None, classification_id=None,
market_id=None, promoter_id=None, dma_id=None,
include_tba=None, include_tbd=None, client_visibility=None,
keyword=None, event_id=None, source=None, include_test=None,
page=None, size=None, locale=None, **kwargs):
"""Search for events matching given criteria.
:param sort: Sorting order of search result
(default: *'relevance,desc'*)
:param latlong: Latitude/longitude filter
:param radius: Radius of area to search
:param unit: Unit of radius, 'miles' or 'km' (default: miles)
:param start_date_time: Filter by start date/time.
Timestamp format: *YYYY-MM-DDTHH:MM:SSZ*
:param end_date_time: Filter by end date/time.
Timestamp format: *YYYY-MM-DDTHH:MM:SSZ*
:param onsale_start_date_time:
:param onsale_end_date_time:
:param country_code:
:param state_code: State code (ex: 'GA' not 'Georgia')
:param venue_id: Find events for provided venue ID
:param attraction_id:
:param segment_id:
:param segment_name:
:param classification_name: Filter events by a list of
classification name(s) (genre/subgenre/type/subtype/segment)
:param classification_id:
:param market_id:
:param promoter_id:
:param dma_id:
        :param include_tba: 'yes' to include events with a to-be-announced
            date (['yes', 'no', 'only'])
        :param include_tbd: 'yes' to include events with a date to be
            defined (['yes', 'no', 'only'])
:param client_visibility:
:param keyword:
:param event_id: Event ID to search
:param source: Filter entities by source name: ['ticketmaster',
'universe', 'frontgate', 'tmr']
:param include_test: 'yes' to include test entities in the
            response, 'no' to exclude them, 'only' to return ONLY test
            entities (['yes', 'no', 'only'])
:param page: Page number to get (default: 0)
:param size: Size of page (default: 20)
:param locale: Locale (default: 'en')
:return:
"""
return self._get(keyword, event_id, sort, include_test, page,
size, locale, latlong=latlong, radius=radius,
unit=unit, start_date_time=start_date_time,
end_date_time=end_date_time,
onsale_start_date_time=onsale_start_date_time,
onsale_end_date_time=onsale_end_date_time,
country_code=country_code, state_code=state_code,
venue_id=venue_id, attraction_id=attraction_id,
segment_id=segment_id, segment_name=segment_name,
classification_name=classification_name,
classification_id=classification_id,
market_id=market_id, promoter_id=promoter_id,
dma_id=dma_id, include_tba=include_tba,
include_tbd=include_tbd, source=source,
client_visibility=client_visibility, **kwargs)
def by_location(self, latitude, longitude, radius='10', unit='miles',
sort='relevance,desc', **kwargs):
"""Search events within a radius of a latitude/longitude coordinate.
:param latitude: Latitude of radius center
:param longitude: Longitude of radius center
:param radius: Radius to search outside given latitude/longitude
:param unit: Unit of radius ('miles' or 'km'),
:param sort: Sort method. (Default: *relevance, desc*). If changed,
you may get wonky results (*date, asc* returns far-away events)
:return: List of events within that area
"""
latitude = str(latitude)
longitude = str(longitude)
radius = str(radius)
latlong = "{lat},{long}".format(lat=latitude, long=longitude)
return self.find(
latlong=latlong,
radius=radius,
unit=unit,
sort=sort,
**kwargs
)
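    # Illustrative usage sketch; the ``ticketpy.ApiClient`` wiring and the
    # API key are assumptions/placeholders:
    #   import ticketpy
    #   tm = ticketpy.ApiClient('your_api_key')
    #   pages = tm.events.by_location(33.7490, -84.3880, radius='5')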
class VenueQuery(BaseQuery):
"""Queries for venues"""
def __init__(self, api_client):
super().__init__(api_client, 'venues', Venue)
def find(self, keyword=None, venue_id=None, sort=None, state_code=None,
country_code=None, source=None, include_test=None,
page=None, size=None, locale=None, **kwargs):
"""Search for venues matching provided parameters
:param keyword: Keyword to search on (such as part of the venue name)
:param venue_id: Venue ID
:param sort: Sort method for response (API default: 'name,asc')
:param state_code: Filter by state code (ex: 'GA' not 'Georgia')
:param country_code: Filter by country code
:param source: Filter entities by source (['ticketmaster', 'universe',
'frontgate', 'tmr'])
:param include_test: ['yes', 'no', 'only'], whether to include
entities flagged as test in the response (default: 'no')
:param page: Page number (default: 0)
:param size: Page size of the response (default: 20)
:param locale: Locale (default: 'en')
:return: Venues found matching criteria
:rtype: ``ticketpy.PagedResponse``
"""
return self._get(keyword, venue_id, sort, include_test, page,
size, locale, state_code=state_code,
country_code=country_code, source=source, **kwargs)
def by_name(self, venue_name, state_code=None, **kwargs):
"""Search for a venue by name.
:param venue_name: Venue name to search
:param state_code: Two-letter state code to narrow results (ex 'GA')
(default: None)
:return: List of venues found matching search criteria
"""
return self.find(keyword=venue_name, state_code=state_code, **kwargs)
| arcward/ticketpy | ticketpy/query.py | Python | mit | 13,726 | 0.003643 |
#
# This file is part of opsd.
#
# opsd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# opsd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with opsd. If not, see <http://www.gnu.org/licenses/>.
"""Telescope action to park the telescope"""
from warwick.observatory.operations import TelescopeAction, TelescopeActionStatus
from warwick.observatory.operations.actions.superwasp.telescope_helpers import tel_stop, tel_park
class ParkTelescope(TelescopeAction):
"""Telescope action to park the telescope"""
def __init__(self, log_name):
super().__init__('Park Telescope', log_name, {})
def run_thread(self):
"""Thread that runs the hardware actions"""
if not tel_stop(self.log_name):
self.status = TelescopeActionStatus.Error
return
if not tel_park(self.log_name):
self.status = TelescopeActionStatus.Error
return
self.status = TelescopeActionStatus.Complete
| warwick-one-metre/opsd | warwick/observatory/operations/actions/superwasp/park_telescope.py | Python | gpl-3.0 | 1,410 | 0.001418 |
import argparse
class Wait(object):
@staticmethod
def add_parser(parser):
parser.add_parser('wait')
def __init__(self, dung):
self.dung = dung
def run(self):
self.dung.wait_for_it()
| dgholz/dung | dung/command/wait.py | Python | mit | 226 | 0.00885 |
import dbmanager
def main(args, config):
db = dbmanager.dbmanager(config.find('dbmanager'))
if args.make_migration:
db.make_migration()
| vyacheslav-bezborodov/skt | stockviewer/stockviewer/db/main.py | Python | mit | 142 | 0.028169 |
#!/usr/bin/env python
import sys
import PAM
from getpass import getpass
def pam_conv(auth, query_list, userData):
resp = []
for i in range(len(query_list)):
query, type = query_list[i]
if type == PAM.PAM_PROMPT_ECHO_ON:
val = raw_input(query)
resp.append((val, 0))
elif type == PAM.PAM_PROMPT_ECHO_OFF:
val = getpass(query)
resp.append((val, 0))
elif type == PAM.PAM_PROMPT_ERROR_MSG or type == PAM.PAM_PROMPT_TEXT_INFO:
print query
resp.append(('', 0))
else:
return None
return resp
service = 'passwd'
if len(sys.argv) == 2:
user = sys.argv[1]
else:
user = None
auth = PAM.pam()
auth.start(service)
if user != None:
auth.set_item(PAM.PAM_USER, user)
auth.set_item(PAM.PAM_CONV, pam_conv)
try:
auth.authenticate()
auth.acct_mgmt()
except PAM.error, resp:
print 'Go away! (%s)' % resp
except:
print 'Internal error'
else:
print 'Good to go!'
| unix4you2/practico | mod/pam/pam_nativo.py | Python | gpl-3.0 | 891 | 0.030303 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: nxos_interface
extends_documentation_fragment: nxos
version_added: "2.1"
short_description: Manages physical attributes of interfaces.
description:
- Manages physical attributes of interfaces of NX-OS switches.
author:
- Jason Edelman (@jedelman8)
- Trishna Guha (@trishnaguha)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- This module is also used to create logical interfaces such as
svis and loopbacks.
- Be cautious of platform specific idiosyncrasies. For example,
when you default a loopback interface, the admin state toggles
on certain versions of NX-OS.
- The M(nxos_overlay_global) C(anycast_gateway_mac) attribute must be
set before setting the C(fabric_forwarding_anycast_gateway) property.
options:
name:
description:
- Full name of interface, i.e. Ethernet1/1, port-channel10.
required: true
aliases: [interface]
interface_type:
description:
- Interface type to be unconfigured from the device.
choices: ['loopback', 'portchannel', 'svi', 'nve']
version_added: 2.2
speed:
description:
- Interface link speed. Applicable for ethernet interface only.
version_added: 2.5
admin_state:
description:
- Administrative state of the interface.
default: up
choices: ['up','down']
description:
description:
- Interface description.
mode:
description:
- Manage Layer 2 or Layer 3 state of the interface.
This option is supported for ethernet and portchannel interface.
Applicable for ethernet and portchannel interface only.
choices: ['layer2','layer3']
mtu:
description:
- MTU for a specific interface. Must be an even number between 576 and 9216.
Applicable for ethernet interface only.
version_added: 2.5
ip_forward:
description:
- Enable/Disable ip forward feature on SVIs.
choices: ['enable','disable']
version_added: 2.2
fabric_forwarding_anycast_gateway:
description:
- Associate SVI with anycast gateway under VLAN configuration mode.
Applicable for SVI interface only.
type: bool
version_added: 2.2
duplex:
description:
- Interface link status. Applicable for ethernet interface only.
default: auto
choices: ['full', 'half', 'auto']
version_added: 2.5
tx_rate:
description:
- Transmit rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
version_added: 2.5
rx_rate:
description:
- Receiver rate in bits per second (bps).
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
version_added: 2.5
neighbors:
description:
- Check the operational state of given interface C(name) for LLDP neighbor.
- The following suboptions are available. This is state check parameter only.
suboptions:
host:
description:
- "LLDP neighbor host for given interface C(name)."
port:
description:
- "LLDP neighbor port to which given interface C(name) is connected."
version_added: 2.5
aggregate:
description: List of Interfaces definitions.
version_added: 2.5
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent','default']
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
device. This wait is applicable for operational state arguments.
default: 10
"""
EXAMPLES = """
- name: Ensure an interface is a Layer 3 port and that it has the proper description
nxos_interface:
name: Ethernet1/1
description: 'Configured by Ansible'
mode: layer3
- name: Admin down an interface
nxos_interface:
name: Ethernet2/1
admin_state: down
- name: Remove all loopback interfaces
nxos_interface:
name: loopback
state: absent
- name: Remove all logical interfaces
nxos_interface:
interface_type: "{{ item }} "
state: absent
loop:
- loopback
- portchannel
- svi
- nve
- name: Admin up all loopback interfaces
nxos_interface:
name: loopback 0-1023
admin_state: up
- name: Admin down all loopback interfaces
nxos_interface:
    name: loopback 0-1023
admin_state: down
- name: Check neighbors intent arguments
nxos_interface:
name: Ethernet2/3
neighbors:
- port: Ethernet2/3
host: abc.mycompany.com
- name: Add interface using aggregate
nxos_interface:
aggregate:
- { name: Ethernet0/1, mtu: 256, description: test-interface-1 }
- { name: Ethernet0/2, mtu: 516, description: test-interface-2 }
duplex: full
speed: 100
state: present
- name: Delete interface using aggregate
nxos_interface:
aggregate:
- name: Loopback9
- name: Loopback10
state: absent
- name: Check intent arguments
nxos_interface:
name: Ethernet0/2
state: up
tx_rate: ge(0)
rx_rate: le(0)
"""
RETURN = """
commands:
description: command list sent to the device
returned: always
type: list
sample:
- interface Ethernet2/3
- mtu 1500
- speed 10
"""
import re
import time
from copy import deepcopy
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, normalize_interface
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import conditional, remove_default_spec
def execute_show_command(command, module):
if 'show run' not in command:
output = 'json'
else:
output = 'text'
cmds = [{
'command': command,
'output': output,
}]
body = run_commands(module, cmds, check_rc=False)
if body and "Invalid" in body[0]:
return []
else:
return body
def search_obj_in_list(name, lst):
for o in lst:
if o['name'] == name:
return o
return None
def get_interface_type(interface):
"""Gets the type of interface
"""
if interface.upper().startswith('ET'):
return 'ethernet'
elif interface.upper().startswith('VL'):
return 'svi'
elif interface.upper().startswith('LO'):
return 'loopback'
elif interface.upper().startswith('MG'):
return 'management'
elif interface.upper().startswith('MA'):
return 'management'
elif interface.upper().startswith('PO'):
return 'portchannel'
elif interface.upper().startswith('NV'):
return 'nve'
else:
return 'unknown'
def get_interfaces_dict(module):
"""Gets all active interfaces on a given switch
"""
try:
body = execute_show_command('show interface', module)[0]
except IndexError:
return {}
interfaces = {
'ethernet': [],
'svi': [],
'loopback': [],
'management': [],
'portchannel': [],
'nve': [],
'unknown': []
}
if body:
interface_list = body['TABLE_interface']['ROW_interface']
for index in interface_list:
intf = index['interface']
intf_type = get_interface_type(intf)
interfaces[intf_type].append(intf)
return interfaces
def get_vlan_interface_attributes(name, intf_type, module):
""" Returns dictionary that has two k/v pairs:
admin_state & description if not an svi, returns None
"""
command = 'show run interface {0} all'.format(name)
try:
body = execute_show_command(command, module)[0]
except IndexError:
return None
if body:
command_list = body.split('\n')
desc = None
admin_state = 'down'
for each in command_list:
if 'description' in each:
desc = each.lstrip().split("description")[1].lstrip()
elif 'no shutdown' in each:
admin_state = 'up'
return dict(description=desc, admin_state=admin_state)
else:
return None
def get_interface_type_removed_cmds(interfaces):
commands = []
for interface in interfaces:
if interface != 'Vlan1':
commands.append('no interface {0}'.format(interface))
return commands
def get_admin_state(admin_state):
command = ''
if admin_state == 'up':
command = 'no shutdown'
elif admin_state == 'down':
command = 'shutdown'
return command
def is_default_interface(name, module):
"""Checks to see if interface exists and if it is a default config
"""
command = 'show run interface {0}'.format(name)
try:
body = execute_show_command(command, module)[0]
except (IndexError, TypeError) as e:
body = ''
if body:
raw_list = body.split('\n')
found = False
for line in raw_list:
if line.startswith('interface'):
found = True
if found and line and not line.startswith('interface'):
return False
return True
else:
return 'DNE'
def add_command_to_interface(interface, cmd, commands):
if interface not in commands:
commands.append(interface)
commands.append(cmd)
def map_obj_to_commands(updates, module):
commands = list()
commands2 = list()
want, have = updates
args = ('speed', 'description', 'duplex', 'mtu')
for w in want:
name = w['name']
mode = w['mode']
ip_forward = w['ip_forward']
fabric_forwarding_anycast_gateway = w['fabric_forwarding_anycast_gateway']
admin_state = w['admin_state']
state = w['state']
interface_type = w['interface_type']
del w['state']
if name:
w['interface_type'] = None
if interface_type:
obj_in_have = {}
if state in ('present', 'default'):
module.fail_json(msg='The interface_type param can be used only with state absent.')
else:
obj_in_have = search_obj_in_list(name, have)
is_default = is_default_interface(name, module)
if name:
interface = 'interface ' + name
if state == 'absent':
if obj_in_have:
commands.append('no interface {0}'.format(name))
elif interface_type and not obj_in_have:
intfs = get_interfaces_dict(module)[interface_type]
cmds = get_interface_type_removed_cmds(intfs)
commands.extend(cmds)
elif state == 'present':
if obj_in_have:
# Don't run switchport command for loopback and svi interfaces
if get_interface_type(name) in ('ethernet', 'portchannel'):
if mode == 'layer2' and mode != obj_in_have.get('mode'):
add_command_to_interface(interface, 'switchport', commands)
elif mode == 'layer3' and mode != obj_in_have.get('mode'):
add_command_to_interface(interface, 'no switchport', commands)
if admin_state == 'up' and admin_state != obj_in_have.get('admin_state'):
add_command_to_interface(interface, 'no shutdown', commands)
elif admin_state == 'down' and admin_state != obj_in_have.get('admin_state'):
add_command_to_interface(interface, 'shutdown', commands)
if ip_forward == 'enable' and ip_forward != obj_in_have.get('ip_forward'):
add_command_to_interface(interface, 'ip forward', commands)
elif ip_forward == 'disable' and ip_forward != obj_in_have.get('ip forward'):
add_command_to_interface(interface, 'no ip forward', commands)
if (fabric_forwarding_anycast_gateway is True and
obj_in_have.get('fabric_forwarding_anycast_gateway') is False):
add_command_to_interface(interface, 'fabric forwarding mode anycast-gateway', commands)
elif (fabric_forwarding_anycast_gateway is False and
obj_in_have.get('fabric_forwarding_anycast_gateway') is True):
add_command_to_interface(interface, 'no fabric forwarding mode anycast-gateway', commands)
for item in args:
candidate = w.get(item)
if candidate and candidate != obj_in_have.get(item):
cmd = item + ' ' + str(candidate)
add_command_to_interface(interface, cmd, commands)
if name and get_interface_type(name) == 'ethernet':
if mode != obj_in_have.get('mode'):
admin_state = w.get('admin_state') or obj_in_have.get('admin_state')
if admin_state:
c1 = 'interface {0}'.format(normalize_interface(w['name']))
c2 = get_admin_state(admin_state)
commands2.append(c1)
commands2.append(c2)
else:
commands.append(interface)
# Don't run switchport command for loopback and svi interfaces
if get_interface_type(name) in ('ethernet', 'portchannel'):
if mode == 'layer2':
commands.append('switchport')
elif mode == 'layer3':
commands.append('no switchport')
if admin_state == 'up':
commands.append('no shutdown')
elif admin_state == 'down':
commands.append('shutdown')
if ip_forward == 'enable':
commands.append('ip forward')
elif ip_forward == 'disable':
commands.append('no ip forward')
if fabric_forwarding_anycast_gateway is True:
commands.append('fabric forwarding mode anycast-gateway')
elif fabric_forwarding_anycast_gateway is False:
commands.append('no fabric forwarding mode anycast-gateway')
for item in args:
candidate = w.get(item)
if candidate:
commands.append(item + ' ' + str(candidate))
elif state == 'default':
if is_default is False:
commands.append('default interface {0}'.format(name))
elif is_default == 'DNE':
module.exit_json(msg='interface you are trying to default does not exist')
return commands, commands2
def map_params_to_obj(module):
obj = []
aggregate = module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = module.params[key]
d = item.copy()
name = d['name']
d['name'] = normalize_interface(name)
obj.append(d)
else:
obj.append({
'name': normalize_interface(module.params['name']),
'description': module.params['description'],
'speed': module.params['speed'],
'mode': module.params['mode'],
'mtu': module.params['mtu'],
'duplex': module.params['duplex'],
'ip_forward': module.params['ip_forward'],
'fabric_forwarding_anycast_gateway': module.params['fabric_forwarding_anycast_gateway'],
'admin_state': module.params['admin_state'],
'state': module.params['state'],
'interface_type': module.params['interface_type'],
'tx_rate': module.params['tx_rate'],
'rx_rate': module.params['rx_rate'],
'neighbors': module.params['neighbors']
})
return obj
def map_config_to_obj(want, module):
objs = list()
for w in want:
obj = dict(name=None, description=None, admin_state=None, speed=None,
mtu=None, mode=None, duplex=None, interface_type=None,
ip_forward=None, fabric_forwarding_anycast_gateway=None)
if not w['name']:
return obj
command = 'show interface {0}'.format(w['name'])
try:
body = execute_show_command(command, module)[0]
except IndexError:
return list()
if body:
try:
interface_table = body['TABLE_interface']['ROW_interface']
except KeyError:
return list()
if interface_table:
if interface_table.get('eth_mode') == 'fex-fabric':
module.fail_json(msg='nxos_interface does not support interfaces with mode "fex-fabric"')
intf_type = get_interface_type(w['name'])
if intf_type in ['portchannel', 'ethernet']:
if not interface_table.get('eth_mode'):
obj['mode'] = 'layer3'
if intf_type == 'ethernet':
obj['name'] = normalize_interface(interface_table.get('interface'))
obj['admin_state'] = interface_table.get('admin_state')
obj['description'] = interface_table.get('desc')
obj['mtu'] = interface_table.get('eth_mtu')
obj['duplex'] = interface_table.get('eth_duplex')
speed = interface_table.get('eth_speed')
mode = interface_table.get('eth_mode')
if mode in ('access', 'trunk'):
obj['mode'] = 'layer2'
elif mode in ('routed', 'layer3'):
obj['mode'] = 'layer3'
command = 'show run interface {0}'.format(obj['name'])
body = execute_show_command(command, module)[0]
speed_match = re.search(r'speed (\d+)', body)
if speed_match is None:
obj['speed'] = 'auto'
else:
obj['speed'] = speed_match.group(1)
duplex_match = re.search(r'duplex (\S+)', body)
if duplex_match is None:
obj['duplex'] = 'auto'
else:
obj['duplex'] = duplex_match.group(1)
if 'ip forward' in body:
obj['ip_forward'] = 'enable'
else:
obj['ip_forward'] = 'disable'
elif intf_type == 'svi':
obj['name'] = normalize_interface(interface_table.get('interface'))
attributes = get_vlan_interface_attributes(obj['name'], intf_type, module)
obj['admin_state'] = str(attributes.get('admin_state',
'nxapibug'))
obj['description'] = str(attributes.get('description',
'nxapi_bug'))
command = 'show run interface {0}'.format(obj['name'])
body = execute_show_command(command, module)[0]
if 'ip forward' in body:
obj['ip_forward'] = 'enable'
else:
obj['ip_forward'] = 'disable'
if 'fabric forwarding mode anycast-gateway' in body:
obj['fabric_forwarding_anycast_gateway'] = True
else:
obj['fabric_forwarding_anycast_gateway'] = False
elif intf_type in ('loopback', 'management', 'nve'):
obj['name'] = normalize_interface(interface_table.get('interface'))
obj['admin_state'] = interface_table.get('admin_state')
obj['description'] = interface_table.get('desc')
elif intf_type == 'portchannel':
obj['name'] = normalize_interface(interface_table.get('interface'))
obj['admin_state'] = interface_table.get('admin_state')
obj['description'] = interface_table.get('desc')
obj['mode'] = interface_table.get('eth_mode')
objs.append(obj)
return objs
def check_declarative_intent_params(module, want):
failed_conditions = []
have_neighbors = None
for w in want:
want_tx_rate = w.get('tx_rate')
want_rx_rate = w.get('rx_rate')
want_neighbors = w.get('neighbors')
time.sleep(module.params['delay'])
if w['interface_type']:
return
cmd = [{'command': 'show interface {0}'.format(w['name']), 'output': 'text'}]
output = run_commands(module, cmd, check_rc=False)
if output:
out = output[0]
else:
out = ''
if want_tx_rate:
match = re.search(r'output rate (\d+)', out, re.M)
have_tx_rate = None
if match:
have_tx_rate = match.group(1)
if have_tx_rate is None or not conditional(want_tx_rate, have_tx_rate.strip(), cast=int):
failed_conditions.append('tx_rate ' + want_tx_rate)
if want_rx_rate:
match = re.search(r'input rate (\d+)', out, re.M)
have_rx_rate = None
if match:
have_rx_rate = match.group(1)
if have_rx_rate is None or not conditional(want_rx_rate, have_rx_rate.strip(), cast=int):
failed_conditions.append('rx_rate ' + want_rx_rate)
if want_neighbors:
have_host = []
have_port = []
if have_neighbors is None:
cmd = [{'command': 'show lldp neighbors interface {0} detail'.format(w['name']), 'output': 'text'}]
output = run_commands(module, cmd, check_rc=False)
if output:
have_neighbors = output[0]
else:
have_neighbors = ''
if have_neighbors and 'Total entries displayed: 0' not in have_neighbors:
for line in have_neighbors.strip().split('\n'):
if line.startswith('Port Description'):
have_port.append(line.split(': ')[1])
if line.startswith('System Name'):
have_host.append(line.split(': ')[1])
for item in want_neighbors:
host = item.get('host')
port = item.get('port')
if host and host not in have_host:
failed_conditions.append('host ' + host)
if port and port not in have_port:
failed_conditions.append('port ' + port)
return failed_conditions
def main():
""" main entry point for module execution
"""
neighbors_spec = dict(
host=dict(),
port=dict()
)
element_spec = dict(
name=dict(aliases=['interface']),
admin_state=dict(default='up', choices=['up', 'down']),
description=dict(),
speed=dict(),
mode=dict(choices=['layer2', 'layer3']),
mtu=dict(),
duplex=dict(choices=['full', 'half', 'auto']),
interface_type=dict(choices=['loopback', 'portchannel', 'svi', 'nve']),
ip_forward=dict(choices=['enable', 'disable']),
fabric_forwarding_anycast_gateway=dict(type='bool'),
tx_rate=dict(),
rx_rate=dict(),
neighbors=dict(type='list', elements='dict', options=neighbors_spec),
delay=dict(default=10, type='int'),
state=dict(choices=['absent', 'present', 'default'], default='present')
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec,
mutually_exclusive=[['name', 'interface_type']])
)
argument_spec.update(element_spec)
argument_spec.update(nxos_argument_spec)
required_one_of = [['name', 'aggregate', 'interface_type']]
mutually_exclusive = [['name', 'aggregate'],
['name', 'interface_type']]
module = AnsibleModule(argument_spec=argument_spec,
required_one_of=required_one_of,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(want, module)
commands = []
commands1, commands2 = map_obj_to_commands((want, have), module)
commands.extend(commands1)
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
# if the mode changes from L2 to L3, the admin state
# seems to change after the API call, so adding a second API
# call to ensure it's in the desired state.
if commands2:
load_config(module, commands2)
commands.extend(commands2)
commands = [cmd for cmd in commands if cmd != 'configure']
result['commands'] = commands
if result['changed']:
failed_conditions = check_declarative_intent_params(module, want)
if failed_conditions:
msg = 'One or more conditional statements have not been satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
module.exit_json(**result)
if __name__ == '__main__':
main()
| mheap/ansible | lib/ansible/modules/network/nxos/nxos_interface.py | Python | gpl-3.0 | 26,385 | 0.001819 |
from django.test import TestCase, RequestFactory
from ddah_web.models import DDAHTemplate, DDAHInstanceWeb
from ddah_web import read_template_as_string
from ddah_web.views import MoustacheTemplateResponse
class InstanceTemplateTestCase(TestCase):
    '''A template holds the HTML to be rendered using {{moustache}} placeholders'''
def setUp(self):
self.default_template = read_template_as_string('instance_templates/default.html')
self.default_template_flat_page = read_template_as_string('instance_templates/default_flat_page.html')
self.default_template_footer = read_template_as_string('instance_templates/partials/footer.html')
self.default_template_head = read_template_as_string('instance_templates/partials/head.html')
self.default_template_header = read_template_as_string('instance_templates/partials/header.html')
self.default_template_style = read_template_as_string('instance_templates/partials/style.html')
def test_create_a_template(self):
template = DDAHTemplate.objects.create()
self.assertEquals(template.content, self.default_template)
self.assertEquals(template.flat_page_content, self.default_template_flat_page)
self.assertEquals(template.head, self.default_template_head)
self.assertEquals(template.header, self.default_template_header)
self.assertEquals(template.style, self.default_template_style)
self.assertEquals(template.footer, self.default_template_footer)
def test_when_creating_an_instance_it_automatically_creates_a_template(self):
instance = DDAHInstanceWeb.objects.create(label="bici", title="Bicicletas")
self.assertTrue(instance.template)
self.assertEquals(instance.template.content, self.default_template)
self.assertEquals(instance.template.head, self.default_template_head)
self.assertEquals(instance.template.header, self.default_template_header)
self.assertEquals(instance.template.style, self.default_template_style)
self.assertEquals(instance.template.footer, self.default_template_footer)
class MustacheTemplateResponseTestCase(TestCase):
def setUp(self):
self.template = DDAHTemplate.objects.create(content="content {{> head }} {{> header }} {{> style }} {{> footer }}",
head="head",
header="header",
style="style",
footer="footer")
self.instance = DDAHInstanceWeb.objects.create(label="bici", title="Bicicletas")
self.instance.template = self.template
self.factory = RequestFactory()
    def test_renders_correctly(self):
request = self.factory.get('/')
response = MoustacheTemplateResponse(request, 'unused.html')
response.context_data = {
'instance': self.instance
}
rendered_text = "content head header style footer"
self.assertEquals(rendered_text, response.rendered_content)
| ciudadanointeligente/deldichoalhecho | ddah_web/tests/instance_template_tests.py | Python | gpl-3.0 | 3,123 | 0.004483 |
#!/usr/bin/env python3
import json
import os
import sys
import anchore_engine.analyzers.utils
analyzer_name = "file_package_verify"
try:
config = anchore_engine.analyzers.utils.init_analyzer_cmdline(
sys.argv, analyzer_name
)
except Exception as err:
print(str(err))
sys.exit(1)
imgname = config["imgid"]
imgid = config["imgid_full"]
outputdir = config["dirs"]["outputdir"]
unpackdir = config["dirs"]["unpackdir"]
squashtar = os.path.join(unpackdir, "squashed.tar")
meta = anchore_engine.analyzers.utils.get_distro_from_squashtar(
squashtar, unpackdir=unpackdir
)
distrodict = anchore_engine.analyzers.utils.get_distro_flavor(
meta["DISTRO"], meta["DISTROVERS"], likedistro=meta["LIKEDISTRO"]
)
flavor = distrodict["flavor"]
# gather file metadata from installed packages
result = {}
resultlist = {}
try:
if flavor == "RHEL":
try:
# result = rpm_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.rpm_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " + str(err))
elif flavor == "DEB":
try:
# result = deb_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.dpkg_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " + str(err))
elif flavor == "ALPINE":
try:
# result = apk_get_file_package_metadata(unpackdir, record)
result = anchore_engine.analyzers.utils.apk_get_file_package_metadata_from_squashtar(
unpackdir, squashtar
)
except Exception as err:
raise Exception("ERROR: " + str(err))
else:
# do nothing, flavor not supported for getting metadata about files from pkg manager
pass
except Exception as err:
print("WARN: analyzer unable to complete - exception: " + str(err))
result = {}
if result:
for f in list(result.keys()):
try:
resultlist[f] = json.dumps(result[f], sort_keys=True)
except Exception as err:
print("WARN: " + str(err))
resultlist[f] = ""
if resultlist:
ofile = os.path.join(outputdir, "distro.pkgfilemeta")
anchore_engine.analyzers.utils.write_kvfile_fromdict(ofile, resultlist)
sys.exit(0)
| anchore/anchore-engine | anchore_engine/analyzers/modules/31_file_package_verify.py | Python | apache-2.0 | 2,520 | 0.001587 |
# -*- coding: utf-8 -*-
import sys,pymongo
from pymongo import MongoClient
from pymongo.errors import ConnectionFailure
from bson.code import Code
class MongoDBConfig:
def __init__(self, db_name, host):
self._db_name= db_name
self._host = host
class MongoDB:
    '''Wrapper around MongoClient, which provides a pooled connection to MongoDB'''
def __init__(self, config):
self.db_name = config._db_name
try:
self.connection = pymongo.MongoClient(host=config._host,auto_start_request=False)
except ConnectionFailure, e:
sys.stderr.write("Could not connect to MongoDB: %s" % e)
sys.exit(1)
def insert(self, doc, collection_name):
'insert a document into a collection'
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
object_id = db_handler[collection_name].insert(doc, safe=True)
return object_id
def findOne(self, query, collection_name):
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
result = db_handler[collection_name].find_one(query)
return result
def removeAll(self, collection_name):
db_handler = self.connection[self.db_name]
assert db_handler.connection == self.connection
with self.connection.start_request():
db_handler[collection_name].remove()
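# Illustrative usage sketch (assumes a reachable mongod; the database and
# collection names below are made up for the example):
#
#     config = MongoDBConfig('testdb', 'localhost')
#     db = MongoDB(config)
#     object_id = db.insert({'name': 'alice'}, 'users')
#     doc = db.findOne({'name': 'alice'}, 'users')
#     db.removeAll('users')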
| vollov/py-lab | src/mongodb/__init__.py | Python | mit | 1,550 | 0.007097 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2015 Eric Beanland <[email protected]>
# This file is part of RecordSheet
#
# RecordSheet is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RecordSheet is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
from nose.tools import with_setup
from webtest import TestApp
from sqlalchemy import event
from sqlalchemy.engine import create_engine
from sqlalchemy.orm.session import Session
from RecordSheet.dbapi import Base
from RecordSheet import jsonapp, dbapi, dbmodel, plugins
app = TestApp(jsonapp.app, extra_environ={'beaker.session':{'user_id':1}})
from test import dbhelper
###############################################################################
def setup_module():
jsonapp.app.uninstall(plugins.CsrfPlugin)
jsonapp.app.uninstall(plugins.AuthPlugin)
dbhelper.setup_module()
def teardown_module():
dbhelper.teardown_module()
###############################################################################
def test_generic_collection():
url = '/accounts?sort=name.asc&sort=id.desc&limit=5&offset=1'
response = app.get(url)
assert response.status_int == 200
assert response.content_type == 'application/json'
def test_generic_collection_404():
response = app.get('/doesnotexist', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
###############################################################################
def test_generic_item():
response = app.get('/accounts/2')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_item_invalid_kind():
response = app.get('/doesnotexist/2', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
def test_generic_item_invalid_id():
response = app.get('/accounts/0', status=404)
assert response.status_int == 404
assert response.content_type == 'application/json'
###############################################################################
data = {'name':'TEST145', 'desc':'test_145'}
def test_generic_put():
response = app.put_json('/accounts', data)
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_put_duplicate():
response = app.put_json('/accounts', data, status=400)
assert response.status_int == 400
assert response.content_type == 'application/json'
def test_generic_put_invalid_attr():
data['__table__'] = 'fubar'
response = app.put_json('/accounts', data, status=400)
assert response.content_type == 'application/json'
###############################################################################
def test_generic_post():
response = app.post_json('/accounts/1', {'desc':'hello'}, status='*')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
def test_generic_post_invalid_id():
response = app.post_json('/accounts/0', {'desc':'hello'}, status=404)
assert response.content_type == 'application/json'
def test_generic_post_invalid_attr():
data = {'desc':'test', 'nxattr':1234}
response = app.post_json('/accounts/1', data, status=400)
assert response.content_type == 'application/json'
#FIXME needs to generate a Integrity exception serverside
#def test_generic_post_invalid_attr():
# response = app.post_json('/accounts/1', {'desc':1}, status=404)
# assert response.content_type == 'application/json'
###############################################################################
def test_journal_put():
posts = [{'amount':100, 'account_id':'TEST01'},
{'amount':-100, 'account_id':'TEST02', 'memo':'testing'}]
data = {'memo':'test journal entry',
'datetime':'2016-06-05 14:09:00-05',
'posts':posts}
response = app.put_json('/journal', data, status='*')
assert response.status_int == 200
assert response.content_type == 'application/json'
assert 'id' in response.json
###############################################################################
def test_imported_transactions_get():
response = app.get('/imported_transactions?limit=10&offset=0')
assert response.content_type == 'application/json'
assert 'imported_transactions' in response.json
###############################################################################
| ericbean/RecordSheet | test/test_jsonapp.py | Python | gpl-3.0 | 5,052 | 0.005542 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-03 17:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('oplan', '0007_auto_20160503_1638'),
]
operations = [
migrations.RemoveField(
model_name='aktermin',
name='constraintAfterEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintAfterEvents',
field=models.ManyToManyField(related_name='constraint_must_be_after_this', to='oplan.AKTermin', verbose_name='Nach Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintBeforeEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintBeforeEvents',
field=models.ManyToManyField(related_name='constraint_must_be_before_this', to='oplan.AKTermin', verbose_name='Vor Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintForceParallelWithEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintForceParallelWithEvents',
field=models.ManyToManyField(related_name='constraint_force_parallel_with_this', to='oplan.AKTermin', verbose_name='Gleichzeitig mit Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintNotParallelWithEvents',
),
migrations.AddField(
model_name='aktermin',
name='constraintNotParallelWithEvents',
field=models.ManyToManyField(related_name='constraint_not_parallel_with_this', to='oplan.AKTermin', verbose_name='Nicht gleichzeitig mit Veranstaltung(en)'),
),
migrations.RemoveField(
model_name='aktermin',
name='constraintRooms',
),
migrations.AddField(
model_name='aktermin',
name='constraintRooms',
field=models.ManyToManyField(blank=True, max_length=255, null=True, related_name='aktermin_constraint_room', to='oplan.Room', verbose_name='In einem der Räume'),
),
]
| d120/kifplan | oplan/migrations/0008_auto_20160503_1910.py | Python | agpl-3.0 | 2,321 | 0.002155 |
from . import vertical_lift_shuttle
| OCA/stock-logistics-warehouse | stock_vertical_lift_server_env/models/__init__.py | Python | agpl-3.0 | 36 | 0 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# world_builder.py
class BuildMap(object):
"""
Base class to actually build a map which is defined
by the 'World' object or Grid (to be decided).
This can mean printing the grid, display as image,
create the map in Minecraft or create in other virtual
environments.
"""
def __init__(self, struct_data, style):
"""
struct_data = details of coords to fill, make
style = details on colours, textures, if applicable
        The asserts check that neither argument is a string; both should be
        iterable lists / dicts (or subclasses of those types).
"""
assert not isinstance(struct_data, str)
assert not isinstance(style, str)
self.struct_data = struct_data
self.style = style
def __str__(self):
res = ''
res += 'BuildMap ' + '\n'
if type(self.struct_data) is list:
for l in self.struct_data:
res += 'data:' + str(l) + '\n'
else: # assume dictionary
for k,v in self.struct_data.items():
res += 'data:' + str(k) + ' = ' + str(v) + '\n'
if type(self.style) is list:
for l in self.style:
res += 'style:' + str(l) + '\n'
else: # assume dictionary
for k,v in self.style.items():
res += 'style:' + str(k) + ' = ' + str(v) + '\n'
return res
class BuildMapMineCraft(BuildMap):
"""
Interface with Minecraft (currently via sendkeys to server)
to create objects in the Minecraft world.
"""
def build(self, ip_file):
import minecraft_builder
minecraft_builder.make_structure(ip_file)
class BuildMapGrid(BuildMap):
"""
    Not much to do here; simply returns a standard grid or
    CSV-formatted file of the world.
"""
pass
class BuildMapImage(BuildMap):
"""
    Generates an image of the world, which is a 2d grid, or
    multiple images if there are multiple floors. There is not much
    point calling this for a proper 3d world (e.g. Unity), but it is
    useful for castle/dungeon game maps with multiple floors.
"""
pass
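if __name__ == '__main__':
    # Minimal self-check sketch: the coordinate and style payloads below are
    # illustrative only, not a real map format used by the callers.
    demo_struct = {(0, 0): 'wall', (1, 0): 'floor'}
    demo_style = {'wall': 'stone', 'floor': 'grass'}
    print(BuildMap(demo_struct, demo_style))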
| acutesoftware/worldbuild | worldbuild/world_builder.py | Python | gpl-2.0 | 2,333 | 0.014145 |
#! ../env/bin/python
from flask import Flask
from webassets.loaders import PythonLoader as PythonAssetsLoader
from appname import assets
from appname.models import db
from appname.controllers.main import main
from appname.controllers.categories import categories
from appname.controllers.products import products
from appname.controllers.catalogs import catalogs
from flask_bootstrap import Bootstrap
from flask import send_from_directory
import os
from appname.extensions import (
cache,
assets_env,
debug_toolbar,
login_manager
)
def create_app(object_name):
"""
    A Flask application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/
Arguments:
object_name: the python path of the config object,
e.g. appname.settings.ProdConfig
"""
app = Flask(__name__)
@app.route('/uploads/<filename>')
def uploaded_file(filename):
return send_from_directory('/home/ahmad/workspace/python/Flask-CRUD/uploads/', filename)
Bootstrap(app)
app.config.from_object(object_name)
# initialize the cache
cache.init_app(app)
# initialize the debug tool bar
debug_toolbar.init_app(app)
# initialize SQLAlchemy
db.init_app(app)
db.app = app
login_manager.init_app(app)
# Import and register the different asset bundles
assets_env.init_app(app)
with app.app_context():
assets_env.load_path = [
os.path.join(os.path.join(os.path.dirname(__file__), os.pardir), 'node_modules'),
os.path.join(os.path.dirname(__file__), 'static'),
]
assets_loader = PythonAssetsLoader(assets)
for name, bundle in assets_loader.load_bundles().items():
assets_env.register(name, bundle)
# register our blueprints
app.register_blueprint(main)
app.register_blueprint(categories)
app.register_blueprint(products)
app.register_blueprint(catalogs)
return app
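# Illustrative factory usage; the settings path below follows the docstring's
# example and is an assumption, not a module verified to ship with this repo:
#
#     app = create_app('appname.settings.DevConfig')
#     app.run(debug=True)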
| ahmadpriatama/Flask-Simple-Ecommerce | appname/__init__.py | Python | bsd-2-clause | 1,969 | 0.001016 |
# coding: utf-8
# Simple chainer interfaces for Deep learning researching
# For autoencoder
# Author: Aiga SUZUKI <[email protected]>
import chainer
import chainer.functions as F
import chainer.optimizers as Opt
import numpy
from libdnn.nnbase import NNBase
from types import MethodType
from abc import abstractmethod
class AutoEncoder(NNBase):
def __init__(self, model, gpu=-1):
NNBase.__init__(self, model, gpu)
self.optimizer = Opt.Adam()
self.optimizer.setup(self.model)
self.loss_function = F.mean_squared_error
self.loss_param = {}
def validate(self, x_data, train=False):
y = self.forward(x_data, train=train)
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.loss_function(x, y, **self.loss_param)
def train(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
perm = numpy.random.permutation(N)
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
self.optimizer.zero_grads()
err = self.validate(x_batch, train=True)
err.backward()
self.optimizer.update()
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
return sum_error / N
def test(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
perm = numpy.random.permutation(N)
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
err = self.validate(x_batch, train=False)
sum_error += float(chainer.cuda.to_cpu(err.data)) * batchsize
action()
return sum_error / N
class StackedAutoEncoder(AutoEncoder):
def __init__(self, model, gpu=-1):
self.sublayer = []
AutoEncoder.__init__(self, model, gpu)
def set_order(self, encl, decl):
if len(encl) != len(decl):
raise TypeError('Encode/Decode layers mismatch')
self.depth = len(encl)
for (el, dl) in zip(encl, reversed(decl)):
self.sublayer.append(chainer.FunctionSet(
enc=self.model[el],
dec=self.model[dl]
))
@abstractmethod
def __encode(self, x, layer, train):
pass
def set_encode(self, func):
self.__encode = MethodType(func, self, StackedAutoEncoder)
def encode(self, x_data, layer=None, train=False):
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.__encode(x, layer, train)
@abstractmethod
def __decode(self, x, layer, train):
pass
def set_decode(self, func):
self.__decode = MethodType(func, self, StackedAutoEncoder)
def decode(self, x_data, layer=None, train=False):
if self.gpu >= 0:
x_data = chainer.cuda.to_gpu(x_data)
x = chainer.Variable(x_data)
return self.__decode(x, layer, train)
def forward(self, x_data, train=False):
code = self.encode(x_data, train=train)
        y = self.__decode(code, None, train=train)
return y
def validate(self, x_data, layer=None, train=False):
targ = self.encode(x_data, layer - 1, train=False)
code = self.encode(x_data, layer, train=train)
y = self.__decode(code, layer, train=train)
return self.loss_function(targ, y, **self.loss_param)
def train(self, x_data, batchsize=100, action=(lambda: None)):
errs = []
N = len(x_data)
perm = numpy.random.permutation(N)
for l in range(1, self.depth + 1):
self.optimizer.setup(self.sublayer[l - 1])
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
self.optimizer.zero_grads()
err = self.validate(x_batch, layer=l, train=True)
err.backward()
self.optimizer.update()
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
errs.append(sum_error / N)
return tuple(errs)
def test(self, x_data, batchsize=100, action=(lambda: None)):
N = len(x_data)
perm = numpy.random.permutation(N)
sum_error = 0.
for i in range(0, N, batchsize):
x_batch = x_data[perm[i:i + batchsize]]
y = self.forward(x_batch, train=False)
if self.gpu >= 0:
x_batch = chainer.cuda.to_gpu(x_batch)
x = chainer.Variable(x_batch)
err = self.loss_function(x, y, **self.loss_param)
sum_error += float(chainer.cuda.to_cpu(err.data)) * len(x_batch)
action()
return sum_error / N
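# Illustrative wiring sketch (the model/layer names are assumptions, not part
# of this module). Functions passed to set_encode/set_decode are bound as
# methods, so they receive the autoencoder instance first and must accept
# (x, layer, train):
#
#     def encode(self, x, layer, train):
#         return F.sigmoid(self.model.enc1(x))
#
#     def decode(self, x, layer, train):
#         return F.sigmoid(self.model.dec1(x))
#
#     ae = StackedAutoEncoder(model, gpu=-1)
#     ae.set_order(('enc1',), ('dec1',))
#     ae.set_encode(encode)
#     ae.set_decode(decode)
#     errors = ae.train(x_data)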
| tochikuji/chainer-libDNN | libdnn/autoencoder.py | Python | mit | 4,929 | 0.000406 |
"""
mountains
~~~~~~~~~
Takes a CSV file either via local or HTTP retrieval and outputs information about the mountains according to spec.
Originally a programming skills check for a particular position. I've get it updated to current python versions
as well as packaging and testing methodologies.
:copyright: 2016-2017 Christian Erick Sauer.
:license: MIT, see LICENSE for more details.
""" # noqa
from .core import * # noqa
from .errors import * # noqa
| c17r/catalyst | src/mountains/__init__.py | Python | mit | 465 | 0 |
import tornado.ioloop
import tornado.web
import socket
import os
import sys
import time
import signal
# import datetime
import h5py
from datetime import datetime, date
import tornado.httpserver
from browserhandler import BrowseHandler
from annotationhandler import AnnotationHandler
from projecthandler import ProjectHandler
from helphandler import HelpHandler
from defaulthandler import DefaultHandler
base_path = os.path.dirname(__file__)
sys.path.insert(1,os.path.join(base_path, '../common'))
from utility import Utility
from database import Database
from paths import Paths
MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 0.5
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", DefaultHandler),
(r"/browse.*", BrowseHandler),
(r"/project.*", ProjectHandler),
(r"/annotate.*", AnnotationHandler),
(r'/help*', HelpHandler),
(r'/settings/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/settings/'}),
(r'/js/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/'}),
(r'/js/vendors/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/js/vendors/'}),
(r'/css/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/css/'}),
(r'/uikit/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/uikit/'}),
(r'/images/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/images/'}),
(r'/open-iconic/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/open-iconic/'}),
(r'/input/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/train/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
(r'/validate/(.*)', tornado.web.StaticFileHandler, {'path': 'resources/input/'}),
#(r"/annotate/(.*)", AnnotationHandler, dict(logic=self)),
]
settings = {
"template_path": 'resources',
"static_path": 'resources',
}
tornado.web.Application.__init__(self, handlers, **settings)
class Server():
def __init__(self, name, port):
self.name = name
self.port = port
application = Application()
self.http_server = tornado.httpserver.HTTPServer( application )
hostname = socket.gethostname()
print 'hostname:', hostname
self.ip = hostname #socket.gethostbyname( hostname )
def print_status(self):
Utility.print_msg ('.')
Utility.print_msg ('\033[93m'+ self.name + ' running/' + '\033[0m', True)
Utility.print_msg ('.')
Utility.print_msg ('open ' + '\033[92m'+'http://' + self.ip + ':' + str(self.port) + '/' + '\033[0m', True)
Utility.print_msg ('.')
def start(self):
self.print_status()
self.http_server.listen( self.port )
tornado.ioloop.IOLoop.instance().start()
def stop(self):
msg = 'shutting down %s in %s seconds'%(self.name, MAX_WAIT_SECONDS_BEFORE_SHUTDOWN)
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
io_loop = tornado.ioloop.IOLoop.instance()
deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN
def stop_loop():
now = time.time()
if now < deadline and (io_loop._callbacks or io_loop._timeouts):
io_loop.add_timeout(now + 1, stop_loop)
else:
io_loop.stop()
Utility.print_msg ('\033[93m'+ 'shutdown' + '\033[0m', True, 'done')
stop_loop()
def sig_handler(sig, frame):
msg = 'caught interrupt signal: %s'%sig
Utility.print_msg ('\033[93m'+ msg + '\033[0m', True)
tornado.ioloop.IOLoop.instance().add_callback(shutdown)
def shutdown():
server.stop()
def main():
global server
signal.signal(signal.SIGTERM, sig_handler)
signal.signal(signal.SIGINT, sig_handler)
port = 8888
name = 'icon webserver'
server = Server(name, port)
server.start()
if __name__ == "__main__":
main()
| fegonda/icon_demo | code/web/server.py | Python | mit | 3,926 | 0.018849 |
"""Support for exposing NX584 elements as sensors."""
import logging
import threading
import time
from nx584 import client as nx584_client
import requests
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_OPENING,
DEVICE_CLASSES,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_EXCLUDE_ZONES = "exclude_zones"
CONF_ZONE_TYPES = "zone_types"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = "5007"
DEFAULT_SSL = False
ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NX584 binary sensor platform."""
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
exclude = config.get(CONF_EXCLUDE_ZONES)
zone_types = config.get(CONF_ZONE_TYPES)
try:
client = nx584_client.Client(f"http://{host}:{port}")
zones = client.list_zones()
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to NX584: %s", str(ex))
return False
version = [int(v) for v in client.get_version().split(".")]
if version < [1, 1]:
_LOGGER.error("NX584 is too old to use for sensors (>=0.2 required)")
return False
zone_sensors = {
zone["number"]: NX584ZoneSensor(
zone, zone_types.get(zone["number"], DEVICE_CLASS_OPENING)
)
for zone in zones
if zone["number"] not in exclude
}
if zone_sensors:
add_entities(zone_sensors.values())
watcher = NX584Watcher(client, zone_sensors)
watcher.start()
else:
_LOGGER.warning("No zones found on NX584")
return True
class NX584ZoneSensor(BinarySensorEntity):
"""Representation of a NX584 zone as a sensor."""
def __init__(self, zone, zone_type):
"""Initialize the nx594 binary sensor."""
self._zone = zone
self._zone_type = zone_type
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the binary sensor."""
return self._zone["name"]
@property
def is_on(self):
"""Return true if the binary sensor is on."""
# True means "faulted" or "open" or "abnormal state"
return self._zone["state"]
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return {"zone_number": self._zone["number"]}
class NX584Watcher(threading.Thread):
"""Event listener thread to process NX584 events."""
def __init__(self, client, zone_sensors):
"""Initialize NX584 watcher thread."""
super().__init__()
self.daemon = True
self._client = client
self._zone_sensors = zone_sensors
def _process_zone_event(self, event):
zone = event["zone"]
# pylint: disable=protected-access
if not (zone_sensor := self._zone_sensors.get(zone)):
return
zone_sensor._zone["state"] = event["zone_state"]
zone_sensor.schedule_update_ha_state()
def _process_events(self, events):
for event in events:
if event.get("type") == "zone_status":
self._process_zone_event(event)
def _run(self):
"""Throw away any existing events so we don't replay history."""
self._client.get_events()
while True:
events = self._client.get_events()
if events:
self._process_events(events)
def run(self):
"""Run the watcher."""
while True:
try:
self._run()
except requests.exceptions.ConnectionError:
_LOGGER.error("Failed to reach NX584 server")
time.sleep(10)
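# Example configuration.yaml entry for this platform (the zone numbers and
# device classes below are illustrative):
#
#     binary_sensor:
#       - platform: nx584
#         host: localhost
#         port: 5007
#         exclude_zones: [5]
#         zone_types:
#           1: door
#           2: window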
| aronsky/home-assistant | homeassistant/components/nx584/binary_sensor.py | Python | apache-2.0 | 4,476 | 0 |
# coding: utf-8
from __future__ import unicode_literals
import re
import hashlib
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
ExtractorError,
int_or_none,
float_or_none,
sanitized_Request,
urlencode_postdata,
)
class YandexMusicBaseIE(InfoExtractor):
@staticmethod
def _handle_error(response):
error = response.get('error')
if error:
raise ExtractorError(error, expected=True)
def _download_json(self, *args, **kwargs):
response = super(YandexMusicBaseIE, self)._download_json(*args, **kwargs)
self._handle_error(response)
return response
class YandexMusicTrackIE(YandexMusicBaseIE):
IE_NAME = 'yandexmusic:track'
IE_DESC = 'Яндекс.Музыка - Трек'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/album/(?P<album_id>\d+)/track/(?P<id>\d+)'
_TEST = {
'url': 'http://music.yandex.ru/album/540508/track/4878838',
'md5': 'f496818aa2f60b6c0062980d2e00dc20',
'info_dict': {
'id': '4878838',
'ext': 'mp3',
'title': 'Carlo Ambrosio & Fabio Di Bari, Carlo Ambrosio - Gypsy Eyes 1',
'filesize': 4628061,
'duration': 193.04,
'track': 'Gypsy Eyes 1',
'album': 'Gypsy Soul',
'album_artist': 'Carlo Ambrosio',
'artist': 'Carlo Ambrosio & Fabio Di Bari, Carlo Ambrosio',
'release_year': '2009',
}
}
def _get_track_url(self, storage_dir, track_id):
data = self._download_json(
'http://music.yandex.ru/api/v1.5/handlers/api-jsonp.jsx?action=getTrackSrc&p=download-info/%s'
% storage_dir,
track_id, 'Downloading track location JSON')
key = hashlib.md5(('XGRlBW9FXlekgbPrRHuSiA' + data['path'][1:] + data['s']).encode('utf-8')).hexdigest()
storage = storage_dir.split('.')
return ('http://%s/get-mp3/%s/%s?track-id=%s&from=service-10-track&similarities-experiment=default'
% (data['host'], key, data['ts'] + data['path'], storage[1]))
def _get_track_info(self, track):
thumbnail = None
cover_uri = track.get('albums', [{}])[0].get('coverUri')
if cover_uri:
thumbnail = cover_uri.replace('%%', 'orig')
if not thumbnail.startswith('http'):
thumbnail = 'http://' + thumbnail
track_title = track['title']
track_info = {
'id': track['id'],
'ext': 'mp3',
'url': self._get_track_url(track['storageDir'], track['id']),
'filesize': int_or_none(track.get('fileSize')),
'duration': float_or_none(track.get('durationMs'), 1000),
'thumbnail': thumbnail,
'track': track_title,
}
def extract_artist(artist_list):
if artist_list and isinstance(artist_list, list):
artists_names = [a['name'] for a in artist_list if a.get('name')]
if artists_names:
return ', '.join(artists_names)
albums = track.get('albums')
if albums and isinstance(albums, list):
album = albums[0]
if isinstance(album, dict):
year = album.get('year')
track_info.update({
'album': album.get('title'),
'album_artist': extract_artist(album.get('artists')),
'release_year': compat_str(year) if year else None,
})
track_artist = extract_artist(track.get('artists'))
if track_artist:
track_info.update({
'artist': track_artist,
'title': '%s - %s' % (track_artist, track_title),
})
else:
track_info['title'] = track_title
return track_info
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
album_id, track_id = mobj.group('album_id'), mobj.group('id')
track = self._download_json(
'http://music.yandex.ru/handlers/track.jsx?track=%s:%s' % (track_id, album_id),
track_id, 'Downloading track JSON')['track']
return self._get_track_info(track)
class YandexMusicPlaylistBaseIE(YandexMusicBaseIE):
def _build_playlist(self, tracks):
return [
self.url_result(
'http://music.yandex.ru/album/%s/track/%s' % (track['albums'][0]['id'], track['id']))
for track in tracks if track.get('albums') and isinstance(track.get('albums'), list)]
class YandexMusicAlbumIE(YandexMusicPlaylistBaseIE):
IE_NAME = 'yandexmusic:album'
IE_DESC = 'Яндекс.Музыка - Альбом'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/album/(?P<id>\d+)/?(\?|$)'
_TEST = {
'url': 'http://music.yandex.ru/album/540508',
'info_dict': {
'id': '540508',
'title': 'Carlo Ambrosio - Gypsy Soul (2009)',
},
'playlist_count': 50,
}
def _real_extract(self, url):
album_id = self._match_id(url)
album = self._download_json(
'http://music.yandex.ru/handlers/album.jsx?album=%s' % album_id,
album_id, 'Downloading album JSON')
entries = self._build_playlist(album['volumes'][0])
title = '%s - %s' % (album['artists'][0]['name'], album['title'])
year = album.get('year')
if year:
title += ' (%s)' % year
return self.playlist_result(entries, compat_str(album['id']), title)
class YandexMusicPlaylistIE(YandexMusicPlaylistBaseIE):
IE_NAME = 'yandexmusic:playlist'
IE_DESC = 'Яндекс.Музыка - Плейлист'
_VALID_URL = r'https?://music\.yandex\.(?:ru|kz|ua|by)/users/[^/]+/playlists/(?P<id>\d+)'
_TESTS = [{
'url': 'http://music.yandex.ru/users/music.partners/playlists/1245',
'info_dict': {
'id': '1245',
'title': 'Что слушают Enter Shikari',
'description': 'md5:3b9f27b0efbe53f2ee1e844d07155cc9',
},
'playlist_count': 6,
}, {
# playlist exceeding the limit of 150 tracks shipped with webpage (see
# https://github.com/rg3/youtube-dl/issues/6666)
'url': 'https://music.yandex.ru/users/ya.playlist/playlists/1036',
'info_dict': {
'id': '1036',
'title': 'Музыка 90-х',
},
'playlist_count': 310,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
mu = self._parse_json(
self._search_regex(
r'var\s+Mu\s*=\s*({.+?});\s*</script>', webpage, 'player'),
playlist_id)
playlist = mu['pageData']['playlist']
tracks, track_ids = playlist['tracks'], playlist['trackIds']
# tracks dictionary shipped with webpage is limited to 150 tracks,
# missing tracks should be retrieved manually.
if len(tracks) < len(track_ids):
present_track_ids = set([compat_str(track['id']) for track in tracks if track.get('id')])
missing_track_ids = set(map(compat_str, track_ids)) - set(present_track_ids)
request = sanitized_Request(
'https://music.yandex.ru/handlers/track-entries.jsx',
urlencode_postdata({
'entries': ','.join(missing_track_ids),
'lang': mu.get('settings', {}).get('lang', 'en'),
'external-domain': 'music.yandex.ru',
'overembed': 'false',
'sign': mu.get('authData', {}).get('user', {}).get('sign'),
'strict': 'true',
}))
request.add_header('Referer', url)
request.add_header('X-Requested-With', 'XMLHttpRequest')
missing_tracks = self._download_json(
request, playlist_id, 'Downloading missing tracks JSON', fatal=False)
if missing_tracks:
tracks.extend(missing_tracks)
return self.playlist_result(
self._build_playlist(tracks),
compat_str(playlist_id),
playlist['title'], playlist.get('description'))
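# These extractors are exercised through the youtube-dl command line, e.g.:
#
#     youtube-dl "https://music.yandex.ru/album/540508/track/4878838"
#
# (the URL is taken from the _TEST case above)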
| nandhp/youtube-dl | youtube_dl/extractor/yandexmusic.py | Python | unlicense | 8,328 | 0.001817 |
# Authors: Fabian Pedregosa <[email protected]>
# Alexandre Gramfort <[email protected]>
# Nelle Varoquaux <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import interpolate
from scipy.stats import spearmanr
from .base import BaseEstimator, TransformerMixin, RegressorMixin
from .utils import as_float_array, check_array, check_consistent_length
from .utils import deprecated
from ._isotonic import _inplace_contiguous_isotonic_regression, _make_unique
import warnings
import math
__all__ = ['check_increasing', 'isotonic_regression',
'IsotonicRegression']
def check_increasing(x, y):
"""Determine whether y is monotonically correlated with x.
y is found increasing or decreasing with respect to x based on a Spearman
correlation test.
Parameters
----------
x : array-like, shape=(n_samples,)
Training data.
y : array-like, shape=(n_samples,)
Training target.
Returns
-------
increasing_bool : boolean
Whether the relationship is increasing or decreasing.
Notes
-----
The Spearman correlation coefficient is estimated from the data, and the
sign of the resulting estimate is used as the result.
In the event that the 95% confidence interval based on Fisher transform
spans zero, a warning is raised.
References
----------
Fisher transformation. Wikipedia.
https://en.wikipedia.org/wiki/Fisher_transformation
"""
# Calculate Spearman rho estimate and set return accordingly.
rho, _ = spearmanr(x, y)
increasing_bool = rho >= 0
# Run Fisher transform to get the rho CI, but handle rho=+/-1
if rho not in [-1.0, 1.0] and len(x) > 3:
F = 0.5 * math.log((1. + rho) / (1. - rho))
F_se = 1 / math.sqrt(len(x) - 3)
# Use a 95% CI, i.e., +/-1.96 S.E.
# https://en.wikipedia.org/wiki/Fisher_transformation
rho_0 = math.tanh(F - 1.96 * F_se)
rho_1 = math.tanh(F + 1.96 * F_se)
# Warn if the CI spans zero.
if np.sign(rho_0) != np.sign(rho_1):
warnings.warn("Confidence interval of the Spearman "
"correlation coefficient spans zero. "
"Determination of ``increasing`` may be "
"suspect.")
return increasing_bool
def isotonic_regression(y, sample_weight=None, y_min=None, y_max=None,
increasing=True):
"""Solve the isotonic regression model::
min sum w[i] (y[i] - y_[i]) ** 2
subject to y_min = y_[1] <= y_[2] ... <= y_[n] = y_max
where:
- y[i] are inputs (real numbers)
- y_[i] are fitted
- w[i] are optional strictly positive weights (default to 1.0)
Read more in the :ref:`User Guide <isotonic>`.
Parameters
----------
y : iterable of floating-point values
The data.
sample_weight : iterable of floating-point values, optional, default: None
Weights on each point of the regression.
If None, weight is set to 1 (equal weights).
y_min : optional, default: None
If not None, set the lowest value of the fit to y_min.
y_max : optional, default: None
If not None, set the highest value of the fit to y_max.
increasing : boolean, optional, default: True
Whether to compute ``y_`` is increasing (if set to True) or decreasing
(if set to False)
Returns
-------
y_ : list of floating-point values
Isotonic fit of y.
References
----------
"Active set algorithms for isotonic regression; A unifying framework"
by Michael J. Best and Nilotpal Chakravarti, section 3.
"""
order = np.s_[:] if increasing else np.s_[::-1]
y = np.array(y[order], dtype=np.float64)
if sample_weight is None:
sample_weight = np.ones(len(y), dtype=np.float64)
else:
sample_weight = np.array(sample_weight[order], dtype=np.float64)
_inplace_contiguous_isotonic_regression(y, sample_weight)
if y_min is not None or y_max is not None:
# Older versions of np.clip don't accept None as a bound, so use np.inf
if y_min is None:
y_min = -np.inf
if y_max is None:
y_max = np.inf
np.clip(y, y_min, y_max, y)
return y[order]
class IsotonicRegression(BaseEstimator, TransformerMixin, RegressorMixin):
"""Isotonic regression model.
The isotonic regression optimization problem is defined by::
min sum w_i (y[i] - y_[i]) ** 2
subject to y_[i] <= y_[j] whenever X[i] <= X[j]
and min(y_) = y_min, max(y_) = y_max
where:
- ``y[i]`` are inputs (real numbers)
- ``y_[i]`` are fitted
- ``X`` specifies the order.
If ``X`` is non-decreasing then ``y_`` is non-decreasing.
- ``w[i]`` are optional strictly positive weights (default to 1.0)
Read more in the :ref:`User Guide <isotonic>`.
Parameters
----------
y_min : optional, default: None
If not None, set the lowest value of the fit to y_min.
y_max : optional, default: None
If not None, set the highest value of the fit to y_max.
increasing : boolean or string, optional, default: True
If boolean, whether or not to fit the isotonic regression with y
increasing or decreasing.
The string value "auto" determines whether y should
increase or decrease based on the Spearman correlation estimate's
sign.
out_of_bounds : string, optional, default: "nan"
The ``out_of_bounds`` parameter handles how x-values outside of the
training domain are handled. When set to "nan", predicted y-values
will be NaN. When set to "clip", predicted y-values will be
set to the value corresponding to the nearest train interval endpoint.
When set to "raise", allow ``interp1d`` to throw ValueError.
Attributes
----------
X_min_ : float
Minimum value of input array `X_` for left bound.
X_max_ : float
Maximum value of input array `X_` for right bound.
f_ : function
The stepwise interpolating function that covers the domain `X_`.
Notes
-----
Ties are broken using the secondary method from Leeuw, 1977.
References
----------
Isotonic Median Regression: A Linear Programming Approach
Nilotpal Chakravarti
Mathematics of Operations Research
Vol. 14, No. 2 (May, 1989), pp. 303-308
Isotone Optimization in R : Pool-Adjacent-Violators
Algorithm (PAVA) and Active Set Methods
Leeuw, Hornik, Mair
Journal of Statistical Software 2009
Correctness of Kruskal's algorithms for monotone regression with ties
Leeuw, Psychometrica, 1977
"""
def __init__(self, y_min=None, y_max=None, increasing=True,
out_of_bounds='nan'):
self.y_min = y_min
self.y_max = y_max
self.increasing = increasing
self.out_of_bounds = out_of_bounds
@property
@deprecated("Attribute ``X_`` is deprecated in version 0.18 and will be"
" removed in version 0.20.")
def X_(self):
return self._X_
@X_.setter
def X_(self, value):
self._X_ = value
@X_.deleter
def X_(self):
del self._X_
@property
@deprecated("Attribute ``y_`` is deprecated in version 0.18 and will"
" be removed in version 0.20.")
def y_(self):
return self._y_
@y_.setter
def y_(self, value):
self._y_ = value
@y_.deleter
def y_(self):
del self._y_
def _check_fit_data(self, X, y, sample_weight=None):
if len(X.shape) != 1:
raise ValueError("X should be a 1d array")
def _build_f(self, X, y):
"""Build the f_ interp1d function."""
# Handle the out_of_bounds argument by setting bounds_error
if self.out_of_bounds not in ["raise", "nan", "clip"]:
raise ValueError("The argument ``out_of_bounds`` must be in "
"'nan', 'clip', 'raise'; got {0}"
.format(self.out_of_bounds))
bounds_error = self.out_of_bounds == "raise"
if len(y) == 1:
# single y, constant prediction
self.f_ = lambda x: y.repeat(x.shape)
else:
self.f_ = interpolate.interp1d(X, y, kind='linear',
bounds_error=bounds_error)
def _build_y(self, X, y, sample_weight, trim_duplicates=True):
"""Build the y_ IsotonicRegression."""
check_consistent_length(X, y, sample_weight)
X, y = [check_array(x, ensure_2d=False) for x in [X, y]]
y = as_float_array(y)
self._check_fit_data(X, y, sample_weight)
# Determine increasing if auto-determination requested
if self.increasing == 'auto':
self.increasing_ = check_increasing(X, y)
else:
self.increasing_ = self.increasing
# If sample_weights is passed, removed zero-weight values and clean
# order
if sample_weight is not None:
sample_weight = check_array(sample_weight, ensure_2d=False)
mask = sample_weight > 0
X, y, sample_weight = X[mask], y[mask], sample_weight[mask]
else:
sample_weight = np.ones(len(y))
order = np.lexsort((y, X))
X, y, sample_weight = [array[order].astype(np.float64, copy=False)
for array in [X, y, sample_weight]]
unique_X, unique_y, unique_sample_weight = _make_unique(
X, y, sample_weight)
# Store _X_ and _y_ to maintain backward compat during the deprecation
# period of X_ and y_
self._X_ = X = unique_X
self._y_ = y = isotonic_regression(unique_y, unique_sample_weight,
self.y_min, self.y_max,
increasing=self.increasing_)
# Handle the left and right bounds on X
self.X_min_, self.X_max_ = np.min(X), np.max(X)
if trim_duplicates:
# Remove unnecessary points for faster prediction
keep_data = np.ones((len(y),), dtype=bool)
# Aside from the 1st and last point, remove points whose y values
# are equal to both the point before and the point after it.
keep_data[1:-1] = np.logical_or(
np.not_equal(y[1:-1], y[:-2]),
np.not_equal(y[1:-1], y[2:])
)
return X[keep_data], y[keep_data]
else:
# The ability to turn off trim_duplicates is only used to it make
# easier to unit test that removing duplicates in y does not have
# any impact the resulting interpolation function (besides
# prediction speed).
return X, y
def fit(self, X, y, sample_weight=None):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like, shape=(n_samples,)
Training data.
y : array-like, shape=(n_samples,)
Training target.
sample_weight : array-like, shape=(n_samples,), optional, default: None
Weights. If set to None, all weights will be set to 1 (equal
weights).
Returns
-------
self : object
Returns an instance of self.
Notes
-----
X is stored for future use, as `transform` needs X to interpolate
new input data.
"""
# Transform y by running the isotonic regression algorithm and
# transform X accordingly.
X, y = self._build_y(X, y, sample_weight)
# It is necessary to store the non-redundant part of the training set
# on the model to make it possible to support model persistence via
# the pickle module as the object built by scipy.interp1d is not
# picklable directly.
self._necessary_X_, self._necessary_y_ = X, y
# Build the interpolation function
self._build_f(X, y)
return self
def transform(self, T):
"""Transform new data by linear interpolation
Parameters
----------
T : array-like, shape=(n_samples,)
Data to transform.
Returns
-------
T_ : array, shape=(n_samples,)
The transformed data
"""
T = as_float_array(T)
if len(T.shape) != 1:
raise ValueError("Isotonic regression input should be a 1d array")
# Handle the out_of_bounds argument by clipping if needed
if self.out_of_bounds not in ["raise", "nan", "clip"]:
raise ValueError("The argument ``out_of_bounds`` must be in "
"'nan', 'clip', 'raise'; got {0}"
.format(self.out_of_bounds))
if self.out_of_bounds == "clip":
T = np.clip(T, self.X_min_, self.X_max_)
return self.f_(T)
def predict(self, T):
"""Predict new data by linear interpolation.
Parameters
----------
T : array-like, shape=(n_samples,)
Data to transform.
Returns
-------
T_ : array, shape=(n_samples,)
Transformed data.
"""
return self.transform(T)
def __getstate__(self):
"""Pickle-protocol - return state of the estimator. """
state = super(IsotonicRegression, self).__getstate__()
# remove interpolation method
state.pop('f_', None)
return state
def __setstate__(self, state):
"""Pickle-protocol - set state of the estimator.
We need to rebuild the interpolation function.
"""
super(IsotonicRegression, self).__setstate__(state)
if hasattr(self, '_necessary_X_') and hasattr(self, '_necessary_y_'):
self._build_f(self._necessary_X_, self._necessary_y_)
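# Minimal usage sketch of the estimator defined above (synthetic data):
#
#     import numpy as np
#     x = np.arange(10, dtype=float)
#     y = np.array([1, 2, 1, 3, 4, 4, 6, 5, 8, 9], dtype=float)
#     ir = IsotonicRegression(out_of_bounds='clip')
#     y_ = ir.fit_transform(x, y)  # non-decreasing fit minimizing squared error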
| mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/sklearn/isotonic.py | Python | mit | 14,061 | 0 |
# Copyright 2013 VMware, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import l3_ext_gw_mode as apidef
from neutron_lib.api import extensions
class L3_ext_gw_mode(extensions.APIExtensionDescriptor):
api_definition = apidef
| noironetworks/neutron | neutron/extensions/l3_ext_gw_mode.py | Python | apache-2.0 | 820 | 0 |
# encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
from .int import int
class KColorChooserMode(int):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__dict__ = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KColorChooserMode.py | Python | gpl-2.0 | 508 | 0.009843 |
"""Script to build the xpi add-in for firefox
Usage : python build-implicit-wait.py "x.x.x.x"
"""
import os, re, sys, shutil, datetime, zipfile, glob
CD = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = CD + r'\implicit-wait'
OUT_DIR = CD + r'\bin'
RDF_PATH = CD + r'\implicit-wait\install.rdf'
def main(args):
arg_version = args and args[0]
set_working_dir(CD)
last_modified_time = get_file_mtime(RDF_PATH, '%Y-%m-%d %H:%M:%S')
current_version = find_in_file(RDF_PATH, r'version>([.\d]+)<');
print __doc__
print 'Last compilation : ' + (last_modified_time or 'none')
print 'Current Version : ' + current_version
new_version = arg_version or get_input_version(current_version)
print 'New version : ' + new_version + '\n'
print 'Update version number ...'
replace_in_file(RDF_PATH, r'(?<=version>)[.\d]+(?=<)', new_version)
print 'Build formater xpi ...'
make_dir(OUT_DIR)
set_working_dir(SRC_DIR)
with ZipFile(OUT_DIR + r'\implicit-wait.xpi', 'w') as zip:
zip.add(r'*')
print '\nDone'
def set_working_dir(directory):
make_dir(directory)
os.chdir(directory)
def make_dir(directory):
if not os.path.isdir(directory):
os.makedirs(directory)
def clear_dir(directory):
if os.path.isdir(directory):
shutil.rmtree(directory)
os.makedirs(directory)
def get_file_mtime(filepath, format=None):
if(not os.path.isfile(filepath)):
return None
dt = datetime.datetime.fromtimestamp(os.path.getmtime(filepath))
if format:
return dt.strftime(format)
return dt
def delete_file(filepath):
if(os.path.isfile(filepath)):
os.remove(filepath)
def find_in_file(filepath, pattern):
with open(filepath, 'r') as f:
result = re.search(pattern, f.read())
return result.group(result.re.groups)
def replace_in_file(filepath, pattern, replacement):
with open(filepath, 'r') as f:
text = re.sub(pattern, replacement, f.read())
with open(filepath, 'w') as f:
f.write(text)
def get_input(message):
try: return raw_input(message)
except NameError: return input(message)
def get_input_version(version):
while True:
input = get_input('Digit to increment [w.x.y.z] or version [0.0.0.0] or skip [s] ? ').strip()
if re.match(r's|w|x|y|z', input) :
idx = {'s': 99, 'w': 0, 'x': 1, 'y': 2, 'z': 3}[input]
return '.'.join([str((int(v)+(i == idx))*(i <= idx)) for i, v in enumerate(version.split('.'))])
elif re.match(r'\d+\.\d+\.\d+\.\d+', input):
return input
class ZipFile(zipfile.ZipFile):
    def __init__(self, file, mode):
        zipfile.ZipFile.__init__(self, file, mode)
def add(self, path):
for item in glob.glob(path):
if os.path.isdir(item):
self.add(item + r'\*');
else:
self.write(item)
if __name__ == '__main__':
main(sys.argv[1:])
| florentbr/SeleniumBasic | FirefoxAddons/build-implicit-wait.py | Python | bsd-3-clause | 3,005 | 0.009318 |
from flask import Blueprint, render_template, session, redirect, url_for, request, flash, g, jsonify, abort
#from flask_login import requires_login
admin_order = Blueprint('admin_order', __name__)
@admin_order.route('/')
def index():
pass
@admin_order.route('/new', methods=['GET', 'POST'])
def new():
pass
@admin_order.route('/edit', methods=['GET', 'POST'])
def edit():
pass
| friendly-of-python/flask-online-store | flask_online_store/views/admin/order.py | Python | mit | 396 | 0.005051 |
from common.common_consts.telem_categories import TelemCategoryEnum
from infection_monkey.telemetry.base_telem import BaseTelem
class ScanTelem(BaseTelem):
def __init__(self, machine):
"""
Default scan telemetry constructor
:param machine: Scanned machine
"""
super(ScanTelem, self).__init__()
self.machine = machine
telem_category = TelemCategoryEnum.SCAN
def get_data(self):
return {"machine": self.machine.as_dict(), "service_count": len(self.machine.services)}
| guardicore/monkey | monkey/infection_monkey/telemetry/scan_telem.py | Python | gpl-3.0 | 537 | 0.001862 |
import openerp.addons.website.tests.test_ui as test_ui
def load_tests(loader, base, _):
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-add_product-test.js'),
{'redirect': '/page/website.homepage'}))
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'),
{'redirect': '/page/website.homepage'}))
base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'),
{'redirect': '/page/website.homepage', 'user': 'demo', 'password': 'demo'}))
    # Test has been commented out in SAAS-3 ONLY; it must be activated in trunk.
    # Logging for JS tests has been improved in trunk, so we stop losing time in saas-3 and debug directly in trunk.
# Tech Saas & AL agreement
# base.addTest(test_ui.WebsiteUiSuite(test_ui.full_path(__file__,'website_sale-sale_process-test.js'), {'path': '/', 'user': None}))
    return base
| browseinfo/odoo_saas3_nicolas | addons/website_sale/tests/test_ui.py | Python | agpl-3.0 | 963 | 0.015576 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-09-09 07:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pages', '0002_auto_20160829_1730'),
]
operations = [
migrations.AlterField(
model_name='page',
name='content',
field=models.TextField(blank=True, default='', null=True),
),
]
| fidals/refarm-site | pages/migrations/0003_auto_20160909_0747.py | Python | mit | 467 | 0 |
"""
Copyright 2017 Ryan Wick ([email protected])
https://github.com/rrwick/Unicycler
This module contains functions relating to BLAST, which Unicycler uses to rotate completed circular
replicons to a standard starting point.
This file is part of Unicycler. Unicycler is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the Free Software Foundation,
either version 3 of the License, or (at your option) any later version. Unicycler is distributed in
the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details. You should have received a copy of the GNU General Public License along with Unicycler. If
not, see <http://www.gnu.org/licenses/>.
"""
import os
import subprocess
from .misc import load_fasta
from . import log
class CannotFindStart(Exception):
pass
def find_start_gene(sequence, start_genes_fasta, identity_threshold, coverage_threshold, blast_dir,
makeblastdb_path, tblastn_path):
"""
This function uses tblastn to look for start genes in the sequence. It returns the first gene
(using the order in the file) which meets the identity and coverage thresholds, as well as
the position of that gene (including which strand it is on).
This function assumes that the sequence is circular with no overlap.
"""
# Prepare the replicon sequence. In order to get a solid, single BLAST hit in cases where the
# gene overlaps from the end to the start, we have to duplicate some of the replicon sequence
# for the BLAST database.
seq_len = len(sequence)
start_genes_fasta = os.path.abspath(start_genes_fasta)
queries = load_fasta(start_genes_fasta)
if not queries:
raise CannotFindStart
longest_query = max(len(x[1]) for x in queries)
longest_query *= 3 # amino acids to nucleotides
dup_length = min(seq_len, longest_query)
sequence = sequence + sequence[:dup_length]
# BLAST has serious issues with paths that contain spaces. This page explains some of it:
# https://www.ncbi.nlm.nih.gov/books/NBK279669/
# But I couldn't make it all work for makeblastdb (spaces made it require -out, and it never
# accepted spaces in the -out path, no matter how I used quotes). So we will just move into the
# temporary directory to run the BLAST commands.
starting_dir = os.getcwd()
os.chdir(blast_dir)
# Create a FASTA file of the replicon sequence.
replicon_fasta_filename = 'replicon.fasta'
replicon_fasta = open(replicon_fasta_filename, 'w')
replicon_fasta.write('>replicon\n')
replicon_fasta.write(sequence)
replicon_fasta.write('\n')
replicon_fasta.close()
# Build the BLAST database.
command = [makeblastdb_path, '-dbtype', 'nucl', '-in', replicon_fasta_filename]
log.log(' ' + ' '.join(command), 2)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, err = process.communicate()
if err:
log.log('\nmakeblastdb encountered an error:\n' + err.decode())
os.chdir(starting_dir)
raise CannotFindStart
# Run the tblastn search.
command = [tblastn_path, '-db', replicon_fasta_filename, '-query', start_genes_fasta, '-outfmt',
'6 qseqid sstart send pident qlen qseq qstart bitscore', '-num_threads', '1']
log.log(' ' + ' '.join(command), 2)
process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
blast_out, blast_err = process.communicate()
process.wait()
if blast_err:
log.log('\nBLAST encountered an error:\n' + blast_err.decode())
# Find the best hit in the results.
best_hit, best_bitscore = None, 0
for line in blast_out.decode().splitlines():
hit = BlastHit(line, seq_len)
if hit.pident >= identity_threshold and hit.query_cov >= coverage_threshold and \
hit.qstart == 0 and hit.bitscore > best_bitscore:
best_hit = hit
best_bitscore = hit.bitscore
os.chdir(starting_dir)
if best_bitscore:
return best_hit
else:
raise CannotFindStart
class BlastHit(object):
def __init__(self, blast_line, seq_len):
self.qseqid = ''
self.pident, self.qstart, self.bitscore, self.query_cov, self.start_pos = 0, 0, 0, 0, 0
self.flip = False
parts = blast_line.strip().split('\t')
if len(parts) > 7:
self.qseqid = parts[0]
self.pident = float(parts[3])
self.qstart = int(parts[6]) - 1
self.bitscore = float(parts[7])
sstart = int(parts[1]) - 1
send = int(parts[2])
qlen = float(parts[4])
qseq = parts[5]
self.query_cov = 100.0 * len(qseq) / qlen
if sstart <= send:
self.start_pos = sstart
self.flip = False
else:
self.start_pos = sstart + 1
self.flip = True
if self.start_pos >= seq_len:
self.start_pos -= seq_len
def __repr__(self):
return 'BLAST hit: query=' + self.qseqid + ', subject start=' + str(self.start_pos) + \
', strand=' + ('reverse' if self.flip else 'forward') + ', ID=' + \
str(self.pident) + ', cov=' + str(self.query_cov) + ', bitscore=' + \
str(self.bitscore)
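# Illustrative call sketch; the paths, thresholds and tool names below are
# assumptions for the example (`sequence` is a circular replicon string):
#
#     hit = find_start_gene(sequence, 'start_genes.fasta', 90.0, 95.0,
#                           '/tmp/blast_tmp', 'makeblastdb', 'tblastn')
#     rotated = sequence[hit.start_pos:] + sequence[:hit.start_pos]
#     # if hit.flip is True, the rotated replicon should also be
#     # reverse-complemented so the start gene reads forward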
| rrwick/Unicycler | unicycler/blast_func.py | Python | gpl-3.0 | 5,533 | 0.004518 |
# -*- encoding: utf-8 -*-
from shapely.wkt import loads as wkt_loads
import dsl
from . import FixtureTest
class ZoosZ13(FixtureTest):
def test_zoo_appears_at_z13(self):
# Zoo Montana, Billings, MT
self.generate_fixtures(dsl.way(2274329294, wkt_loads('POINT (-108.620965329915 45.7322965681428)'), {u'addr:housenumber': u'2100', u'name': u'Zoo Montana', u'addr:city': u'Billings, MT 59106', u'source': u'openstreetmap.org', u'tourism': u'zoo', u'addr:street': u'S. Shiloh Road'})) # noqa
self.assert_has_feature(
13, 1624, 2923, 'pois',
{'kind': 'zoo'})
| mapzen/vector-datasource | integration-test/421-zoos-z13.py | Python | mit | 609 | 0 |
import csv
from giantcellsim_trial import giantcellsim_trial
import itertools
import numpy
def flatten(items, seqtypes=(list, tuple)): # used for flattening lists
for i, x in enumerate(items):
while isinstance(items[i], seqtypes):
items[i:i+1] = items[i]
return items
def giantcellsim_motifoutput(parameterlist,masterprefix,testprefix,trials,growthIterations,max_strand_nr,maxStrandLength,numCells,numRounds,motif,elong,bias):
pop_tracker = []
with open(masterprefix+ testprefix +'_MotifData_motif{motif}_len{maxStrandLength}_bias{bias}_elong{elong}_{trials}trials_numRound{numRounds}.csv'.format(motif = motif, maxStrandLength = maxStrandLength, bias=bias, elong=elong, trials=trials, numRounds=numRounds), 'wb') as f:
writer = csv.writer(f)
writer.writerow(parameterlist)
for trial in range(trials):
pop_tracker.append([])
nr_motifs, nr_strands, nr_cells_with_motif, pop_tracker[trial] = giantcellsim_trial(motif,growthIterations,max_strand_nr,maxStrandLength,numCells,numRounds,elong,bias)
motif_freq = [motifs / float(total) for motifs,total in itertools.izip(nr_motifs,nr_strands)]
strands_freq = [strands / float(max_strand_nr*numCells) for strands in nr_strands]
cells_with_freq = [cells / float(numCells) for cells in nr_cells_with_motif]
writer.writerow(motif_freq)
writer.writerow(strands_freq)
writer.writerow(cells_with_freq)
if trial == 0:
motif_freq_aggregate = motif_freq
strands_freq_aggregate = strands_freq
cells_with_freq_aggregate = cells_with_freq
nr_strands_per_time = nr_strands
else:
motif_freq_aggregate = [list(round_data) for round_data in zip(motif_freq_aggregate,motif_freq)]
strands_freq_aggregate = [list(round_data) for round_data in zip(strands_freq_aggregate,strands_freq)]
cells_with_freq_aggregate = [list(round_data) for round_data in zip(cells_with_freq_aggregate,cells_with_freq)]
nr_strands_per_time = [list(round_data) for round_data in zip(nr_strands_per_time,nr_strands)]
for time_point in range(numRounds):
motif_freq_aggregate[time_point] = flatten(motif_freq_aggregate[time_point])
strands_freq_aggregate[time_point] = flatten(strands_freq_aggregate[time_point])
cells_with_freq_aggregate[time_point] = flatten(cells_with_freq_aggregate[time_point])
nr_strands_per_time[time_point] = flatten(nr_strands_per_time[time_point])
means = []
stdevs = []
for iterator in range(3):
means.append([])
stdevs.append([])
for time_point in range(numRounds):
means[0].append(numpy.mean(motif_freq_aggregate[time_point]))
stdevs[0].append(numpy.std(motif_freq_aggregate[time_point]))
means[1].append(numpy.mean(strands_freq_aggregate[time_point]))
stdevs[1].append(numpy.std(strands_freq_aggregate[time_point]))
means[2].append(numpy.mean(cells_with_freq_aggregate[time_point]))
stdevs[2].append(numpy.std(cells_with_freq_aggregate[time_point]))
for mean_data in means:
writer.writerow(mean_data)
for stdev_data in stdevs:
writer.writerow(stdev_data)
f.close()
return pop_tracker, nr_strands_per_time
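# Illustrative call sketch; every argument value below is made up for the
# example (see giantcellsim_trial for the meaning of the simulation inputs):
#
#     pop_tracker, strands_per_time = giantcellsim_motifoutput(
#         ['param header'], 'output/', 'run1', trials=3, growthIterations=10,
#         max_strand_nr=50, maxStrandLength=20, numCells=100, numRounds=5,
#         motif='AT', elong=0.5, bias=0.1)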
| kins912/giantcellsim | giantcellsim_motifoutput.py | Python | mit | 3,105 | 0.02963 |
import os
import shutil
import logging
from collections import OrderedDict
from mock import patch
from django.conf import settings
from django.test import TestCase
log = logging.getLogger(__name__)
class RTDTestCase(TestCase):
def setUp(self):
self.cwd = os.path.dirname(__file__)
self.build_dir = os.path.join(self.cwd, 'builds')
log.info("build dir: %s" % self.build_dir)
if not os.path.exists(self.build_dir):
os.makedirs(self.build_dir)
settings.DOCROOT = self.build_dir
def tearDown(self):
shutil.rmtree(self.build_dir)
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
class MockBuildTestCase(TestCase):
'''Mock build triggers for test cases'''
pass
class WizardTestCase(TestCase):
'''Test case for testing wizard forms'''
step_data = OrderedDict({})
url = None
wizard_class_slug = None
@patch('readthedocs.projects.views.private.trigger_build', lambda x, basic: None)
def post_step(self, step, **data):
        '''Post step form data to `url`, using supplementary `kwargs`
Use data from kwargs to build dict to pass into form
'''
if not self.url:
raise Exception('Missing wizard URL')
try:
data = {}
for key in self.step_data:
data.update({('{0}-{1}'.format(key, k), v)
for (k, v) in self.step_data[key].items()})
if key == step:
break
except KeyError:
pass
# Update with prefixed step data
data['{0}-current_step'.format(self.wizard_class_slug)] = step
resp = self.client.post(self.url, data)
self.assertIsNotNone(resp)
return resp
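    # Illustrative sketch (names are hypothetical, not from this module): a
    # subclass fills ``step_data`` with one dict of form fields per step,
    #   step_data = OrderedDict([('basics', {'name': 'demo'})])
    # and calls ``self.post_step('basics')``; each field is posted under the
    # ``basics-`` prefix that the form wizard expects.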
# We use camelCase on purpose here to conform with unittest's naming
# conventions.
def assertWizardResponse(self, response, step=None): # noqa
'''Assert successful wizard response'''
        # Is this the last form?
if step is None:
try:
wizard = response.context['wizard']
self.assertEqual(wizard['form'].errors, {})
except (TypeError, KeyError):
pass
self.assertEqual(response.status_code, 302)
else:
self.assertIn('wizard', response.context)
wizard = response.context['wizard']
try:
self.assertEqual(wizard['form'].errors, {})
except AssertionError:
self.assertIsNone(wizard['form'].errors)
self.assertEqual(response.status_code, 200)
self.assertIsNotNone(response.context['wizard'])
self.assertEqual(wizard['steps'].current, step)
self.assertIn('{0}-current_step'.format(self.wizard_class_slug),
response.content)
# We use camelCase on purpose here to conform with unittest's naming
# conventions.
def assertWizardFailure(self, response, field, match=None): # noqa
'''Assert field threw a validation error
response
Client response object
field
Field name to test for validation error
match
Regex match for field validation error
'''
self.assertEqual(response.status_code, 200)
self.assertIn('wizard', response.context)
self.assertIn('form', response.context['wizard'])
self.assertIn(field, response.context['wizard']['form'].errors)
if match is not None:
error = response.context['wizard']['form'].errors[field]
self.assertRegexpMatches(unicode(error), match)
| GovReady/readthedocs.org | readthedocs/rtd_tests/base.py | Python | mit | 3,856 | 0.001037 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import datetime
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('gym', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='gym',
name='email',
field=models.EmailField(max_length=254, verbose_name='Email', blank=True, null=True),
),
migrations.AlterField(
model_name='gym',
name='zip_code',
field=models.CharField(max_length=10, verbose_name='ZIP code', blank=True, null=True),
),
migrations.AlterField(
model_name='gymconfig',
name='weeks_inactive',
field=models.PositiveIntegerField(help_text='Number of weeks since the last time a user logged his presence to be considered inactive', default=4, verbose_name='Reminder inactive members'),
),
]
| petervanderdoes/wger | wger/gym/migrations/0002_auto_20151003_1944.py | Python | agpl-3.0 | 1,061 | 0.002828 |
# -*- coding: utf-8 -*-
# Geocluster - A simple and naive geo cluster
# (c) Régis FLORET 2014 and later
#
def convert_lat_from_gps(value):
"""
    Convert a latitude from GPS coordinate to decimal degrees
    :param value: The latitude as a float between 0 and -90
    :return: The latitude in decimal degrees
"""
assert (isinstance(value, (int, float)))
return value if value > 0 else 90 + abs(value)
def convert_lng_from_gps(value):
"""
Convert a longitude from GPS coordinate to decimal degrees
:param value: The longitude as a float
:return: The longitude in decimal degrees
"""
assert (isinstance(value, (int, float)))
return value if value > 0 else 180 + abs(value)
def convert_lat_from_degrees(value):
"""
    Convert a latitude from decimal degrees to GPS coordinate
    :param value: The latitude as a float
    :return: The latitude in GPS coordinate
"""
assert (isinstance(value, (int, float)))
    if value > 180:
        raise ValueError("Latitude in degrees can't be greater than 180")
    elif value < 0:
        raise ValueError("Latitude in degrees can't be less than 0")
return value if value < 90 else 90 - value
def convert_lng_from_degrees(value):
"""
Convert a longitude from decimal degrees to GPS coordinate
:param value: The longitude as a float
:return: The longitude in GPS coordinate
"""
assert (isinstance(value, (int, float)))
    if value > 180:
        raise ValueError("Longitude in degrees can't be greater than 180")
    elif value < 0:
        raise ValueError("Longitude in degrees can't be less than 0")
return value if value < 180 else 180 - value
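# Illustrative round trip (not part of the original module):
#   convert_lat_from_gps(-45)     -> 135   (90 + 45)
#   convert_lat_from_degrees(135) -> -45   (90 - 135)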
| regisf/geocluster | geocluster/geoconvertion.py | Python | mit | 1,701 | 0 |
#!/usr/bin/env python
# pip installs packages in editable mode using pip_install.py
#
# cryptography is currently using this script in their CI at
# https://github.com/pyca/cryptography/blob/a02fdd60d98273ca34427235c4ca96687a12b239/.travis/downstream.d/certbot.sh#L8-L9.
# We should try to remember to keep their repo updated if we make any changes
# to this script which may break things for them.
import sys
import pip_install
def main(args):
new_args = []
for arg in args:
new_args.append('-e')
new_args.append(arg)
pip_install.main(new_args)
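# Example invocation (illustrative): each positional argument is prefixed with
# "-e" before being handed to pip_install.main, so
#   python pip_install_editable.py path/to/pkg1 path/to/pkg2
# runs roughly the equivalent of "pip install -e path/to/pkg1 -e path/to/pkg2"
# (with whatever pinning pip_install itself applies).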
if __name__ == '__main__':
main(sys.argv[1:])
| letsencrypt/letsencrypt | tools/pip_install_editable.py | Python | apache-2.0 | 629 | 0.00159 |
from django.db import migrations
from corehq.apps.smsbillables.management.commands.bootstrap_gateway_fees import (
bootstrap_pinpoint_gateway,
)
def add_pinpoint_gateway_fee_for_migration(apps, schema_editor):
bootstrap_pinpoint_gateway(apps)
class Migration(migrations.Migration):
dependencies = [
('smsbillables', '0021_infobip_gateway_fee_amount_null'),
]
operations = [
migrations.RunPython(add_pinpoint_gateway_fee_for_migration),
]
| dimagi/commcare-hq | corehq/apps/smsbillables/migrations/0022_pinpoint_gateway_fee_amount_null.py | Python | bsd-3-clause | 485 | 0.002062 |
# encoding: utf-8
# module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
from Misc import Misc
class Arrow(Misc):
"""
Object GtkArrow
Properties from GtkArrow:
arrow-type -> GtkArrowType: Arrow direction
The direction the arrow should point
shadow-type -> GtkShadowType: Arrow shadow
Appearance of the shadow surrounding the arrow
Properties from GtkMisc:
xalign -> gfloat: X align
The horizontal alignment, from 0 (left) to 1 (right). Reversed for RTL layouts.
yalign -> gfloat: Y align
The vertical alignment, from 0 (top) to 1 (bottom)
xpad -> gint: X pad
The amount of space to add on the left and right of the widget, in pixels
ypad -> gint: Y pad
The amount of space to add on the top and bottom of the widget, in pixels
Signals from GtkWidget:
composited-changed ()
show ()
hide ()
map ()
unmap ()
realize ()
unrealize ()
size-request (GtkRequisition)
size-allocate (GdkRectangle)
state-changed (GtkStateType)
parent-set (GtkWidget)
hierarchy-changed (GtkWidget)
style-set (GtkStyle)
direction-changed (GtkTextDirection)
grab-notify (gboolean)
child-notify (GParam)
mnemonic-activate (gboolean) -> gboolean
grab-focus ()
focus (GtkDirectionType) -> gboolean
move-focus (GtkDirectionType)
event (GdkEvent) -> gboolean
event-after (GdkEvent)
button-press-event (GdkEvent) -> gboolean
button-release-event (GdkEvent) -> gboolean
scroll-event (GdkEvent) -> gboolean
motion-notify-event (GdkEvent) -> gboolean
keynav-failed (GtkDirectionType) -> gboolean
delete-event (GdkEvent) -> gboolean
destroy-event (GdkEvent) -> gboolean
expose-event (GdkEvent) -> gboolean
key-press-event (GdkEvent) -> gboolean
key-release-event (GdkEvent) -> gboolean
enter-notify-event (GdkEvent) -> gboolean
leave-notify-event (GdkEvent) -> gboolean
configure-event (GdkEvent) -> gboolean
focus-in-event (GdkEvent) -> gboolean
focus-out-event (GdkEvent) -> gboolean
map-event (GdkEvent) -> gboolean
unmap-event (GdkEvent) -> gboolean
property-notify-event (GdkEvent) -> gboolean
selection-clear-event (GdkEvent) -> gboolean
selection-request-event (GdkEvent) -> gboolean
selection-notify-event (GdkEvent) -> gboolean
selection-received (GtkSelectionData, guint)
selection-get (GtkSelectionData, guint, guint)
proximity-in-event (GdkEvent) -> gboolean
proximity-out-event (GdkEvent) -> gboolean
drag-leave (GdkDragContext, guint)
drag-begin (GdkDragContext)
drag-end (GdkDragContext)
drag-data-delete (GdkDragContext)
drag-failed (GdkDragContext, GtkDragResult) -> gboolean
drag-motion (GdkDragContext, gint, gint, guint) -> gboolean
drag-drop (GdkDragContext, gint, gint, guint) -> gboolean
drag-data-get (GdkDragContext, GtkSelectionData, guint, guint)
drag-data-received (GdkDragContext, gint, gint, GtkSelectionData, guint, guint)
visibility-notify-event (GdkEvent) -> gboolean
client-event (GdkEvent) -> gboolean
no-expose-event (GdkEvent) -> gboolean
window-state-event (GdkEvent) -> gboolean
damage-event (GdkEvent) -> gboolean
grab-broken-event (GdkEvent) -> gboolean
query-tooltip (gint, gint, gboolean, GtkTooltip) -> gboolean
popup-menu () -> gboolean
show-help (GtkWidgetHelpType) -> gboolean
accel-closures-changed ()
screen-changed (GdkScreen)
can-activate-accel (guint) -> gboolean
Properties from GtkWidget:
name -> gchararray: Widget name
The name of the widget
parent -> GtkContainer: Parent widget
The parent widget of this widget. Must be a Container widget
width-request -> gint: Width request
Override for width request of the widget, or -1 if natural request should be used
height-request -> gint: Height request
Override for height request of the widget, or -1 if natural request should be used
visible -> gboolean: Visible
Whether the widget is visible
sensitive -> gboolean: Sensitive
Whether the widget responds to input
app-paintable -> gboolean: Application paintable
Whether the application will paint directly on the widget
can-focus -> gboolean: Can focus
Whether the widget can accept the input focus
has-focus -> gboolean: Has focus
Whether the widget has the input focus
is-focus -> gboolean: Is focus
Whether the widget is the focus widget within the toplevel
can-default -> gboolean: Can default
Whether the widget can be the default widget
has-default -> gboolean: Has default
Whether the widget is the default widget
receives-default -> gboolean: Receives default
If TRUE, the widget will receive the default action when it is focused
composite-child -> gboolean: Composite child
Whether the widget is part of a composite widget
style -> GtkStyle: Style
The style of the widget, which contains information about how it will look (colors etc)
events -> GdkEventMask: Events
The event mask that decides what kind of GdkEvents this widget gets
extension-events -> GdkExtensionMode: Extension events
The mask that decides what kind of extension events this widget gets
no-show-all -> gboolean: No show all
Whether gtk_widget_show_all() should not affect this widget
has-tooltip -> gboolean: Has tooltip
Whether this widget has a tooltip
tooltip-markup -> gchararray: Tooltip markup
The contents of the tooltip for this widget
tooltip-text -> gchararray: Tooltip Text
The contents of the tooltip for this widget
window -> GdkWindow: Window
The widget's window if it is realized
double-buffered -> gboolean: Double Buffered
Whether or not the widget is double buffered
Signals from GtkObject:
destroy ()
Properties from GtkObject:
user-data -> gpointer: User Data
Anonymous User Data Pointer
Signals from GObject:
notify (GParam)
"""
def set(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__gtype__ = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/gtk/_gtk/Arrow.py | Python | gpl-2.0 | 6,743 | 0.002818 |
import datetime
import bcrypt
import rethinkdb as r
from sondra.api.expose import expose_method, expose_method_explicit
from sondra.auth.decorators import authorized_method, authorization_required, authentication_required, anonymous_method
from sondra.collection import Collection
from .documents import Credentials, Role, User, LoggedInUser, IssuedToken
@authorization_required('write')
@authentication_required('read')
class Roles(Collection):
primary_key = 'slug'
document_class = Role
autocomplete_props = ('title', 'description')
template = '${title}'
class UserCredentials(Collection):
primary_key = 'user'
document_class = Credentials
private = True
@authorization_required('write')
class Users(Collection):
document_class = User
primary_key = 'username'
indexes = ['email']
order_by = ('family_name', 'given_name', 'username')
def __init__(self, application):
super(Users, self).__init__(application)
# if '__anonymous__' not in self:
# self.create_user('__anonymous__', '', '', active=False)
#
# self._anonymous_user = self['__anonymous__']
#
@property
def anonymous(self):
return None # self._anonymous_user
def validate_password(self, password):
"""Validate that the desired password is strong enough to use.
Override this in a subclass if you want stronger controls on the password. This version
        of the function only makes sure that the password has a minimum length of 6.
Args:
password (str): The password to use
Returns:
None
Raises:
ValueError if the password doesn't pass muster.
"""
if len(password) < 6:
raise ValueError("Password too short")
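    # A subclass could tighten the policy, e.g. (illustrative sketch):
    #
    #   class StrictUsers(Users):
    #       def validate_password(self, password):
    #           super().validate_password(password)
    #           if password.isalpha():
    #               raise ValueError("Password needs a non-letter character")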
def user_data(self,
username: str,
email: str,
locale: str='en-US',
password: str=None,
family_name: str=None,
given_name: str=None,
names: list=None,
active: bool=True,
roles: list=None,
confirmed_email: bool=False
) -> str:
"""Create a new user
Args:
username (str): The username to use. Can be blank. If blank the username is the email.
password (str): The password to use.
email (str): The email address for the user. Should be unique
locale (str="en-US"): The name of the locale for the user
family_name (str): The user's family name
given_name (str): The user's given name
names (str): The user's middle name
active (bool): Default true. Whether or not the user is allowed to log in.
roles (list[roles]): List of role objects or urls. The list of roles a user is granted.
confirmed_email (bool): Default False. The user has confirmed their email address already.
Returns:
str: The url of the new user object.
Raises:
            PermissionError if the user already exists.
ValueError if the user's password does not pass muster.
"""
email = email.lower()
if not password:
active=False
password=''
if not username:
username = email
if username in self:
raise PermissionError("Attempt to create duplicate user " + username)
user = {
"username": username,
"email": email,
"email_verified": False,
"active": active if active is not None else self.document_class.active_by_default,
"locale": locale,
"created": datetime.datetime.now(),
"roles": roles or [],
"confirmed_email": confirmed_email
}
if family_name:
user['family_name'] = family_name
if given_name:
user['given_name'] = given_name
if names:
user['names'] = names
credentials = None
if active and password:
self.validate_password(password)
salt = bcrypt.gensalt()
secret = bcrypt.gensalt(16)
hashed_password = bcrypt.hashpw(password.encode('utf-8'), salt)
credentials = {
'password': hashed_password.decode('utf-8'),
'salt': salt.decode('utf-8'),
'secret': secret.decode('utf-8')
}
return user, credentials
@authorized_method
@expose_method_explicit(
title="Create User",
side_effects=True,
request_schema={
"type": "object",
"required": ['username', 'email'],
"description": "Create a new user in the system",
"properties": {
"username": {"type": "string", "title": "Username", "description": "The new username"},
"email": {"type": "string", "title": "email", "description": "The user's email"},
"locale": {"type": "string", "title": "Locale", "description": "The user's default language setting", "default": "en-US"}, #, "format": "locale"},
"password": {"type": "string", "title": "Password", "description": "The user's password. Leave blank to have it auto-generated."},
"family_name": {"type": "string", "title": "Family Name", "description": "The user's password"},
"given_name": {"type": "string", "title": "Given Name", "description": "The user's password"},
"names": {"type": "string", "title": "Middle Name(s)", "description": "The user's middle names"},
"active": {"type": "boolean", "title": "Can Login", "description": "The user can login", "default": True},
"roles": {"type": "array", "title": "Roles", "items": {"type": "string", "fk": "/auth/roles"}, "description": "The roles to assign to the new user.", "default": []},
"confirmed_email": {"type": "boolean", "default": False, "title": "Confirmed", "description": "Whether or not the user has confirmed their email address."}
}
},
response_schema={
"type": "object",
"properties": {"_": {"type": "string", "description": "The new user's URL."}}
},
)
def create_user(
self,
username: str,
email: str,
locale: str='en-US',
password: str=None,
family_name: str=None,
given_name: str=None,
names: list=None,
active: bool=True,
roles: list=None,
confirmed_email: bool=False,
_user=None
) -> str:
"""Create a new user
Args:
username (str): The username to use. Can be blank. If blank the username is the email.
password (str): The password to use.
email (str): The email address for the user. Should be unique
locale (str="en-US"): The name of the locale for the user
family_name (str): The user's family name
given_name (str): The user's given name
names (str): The user's middle name
active (bool): Default true. Whether or not the user is allowed to log in.
roles (list[roles]): List of role objects or urls. The list of roles a user is granted.
confirmed_email (bool): Default False. The user has confirmed their email address already.
Returns:
str: The url of the new user object.
Raises:
            PermissionError if the user already exists.
ValueError if the user's password does not pass muster.
"""
user_record, credentials = self.user_data(
username=username,
email=email,
locale=locale,
password=password,
family_name=family_name,
given_name=given_name,
names=names,
active=active,
roles=roles,
confirmed_email=confirmed_email,
)
user = self.create(user_record)
if credentials:
credentials['user'] = username
self.application['user-credentials'].create(credentials)
return user.url
def create_users(self, *users):
us = []
cs = []
for u, c in (self.user_data(**x) for x in users):
us.append(u)
if c is not None:
cs.append(c)
self.create(us)
if cs:
self.application['user-credentials'].create(cs)
@anonymous_method
@expose_method
def signup(
self,
username: str,
password: str,
email: str,
locale: str='en-US',
family_name: str=None,
given_name: str=None,
names: list=None
) -> bool:
"""Create a new user anonymously. by default the user is inactive and email is not confirmed. No roles can be
assigned except by an admin
Args:
username (str): The username to use. Can be blank. If blank the username is the email.
password (str): The password to use.
email (str): The email address for the user. Should be unique
locale (str="en-US"): The name of the locale for the user
            family_name (str): The user's family name
            given_name (str): The user's given name
            names (str): The user's middle names
Returns:
str: The url of the new user object.
Raises:
            PermissionError if the user already exists.
ValueError if the user's password does not pass muster.
"""
self.create_user(
username=username,
password=password,
email=email,
family_name=family_name,
given_name=given_name,
names=names,
active=False
)
# self[username].send_confirmation_email()
return True
@anonymous_method
@expose_method
def by_email(self, email: str) -> 'sondra.auth.documents.User':
email = email.lower()
u = self.q(self.table.get_all(email, index='email'))
try:
return next(u).url
        except StopIteration:
return None
class LoggedInUsers(Collection):
primary_key = 'secret'
document_class = LoggedInUser
indexes = ['token']
private = True
def for_token(self, token):
result = self.table.get_all(token, index='token').run(self.application.connection)
try:
return self.document_class(next(result), self, True)
except StopIteration:
return None
def delete_token(self, token):
self.q(self.table.get_all(token, index='token').delete())
def delete_expired_tokens(self):
self.q(
self.table.filter(r.row['expires'] <= r.now())
)
class IssuedTokens(Collection):
primary_key = 'token'
document_class = IssuedToken
indexes = ['user', 'exp']
private = True
| JeffHeard/sondra | sondra/auth/collections.py | Python | apache-2.0 | 11,035 | 0.007431 |
from django.db import models
from django.contrib.auth.models import User
import requests
from datetime import datetime
from BeautifulSoup import BeautifulSoup
import re
SPOJ_ENDPOINT = "http://www.spoj.com/status/%s/signedlist/"
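# fetch_spoj_data below parses this pipe-separated signed list; a line looks
# roughly like (illustrative, only the field positions matter to the parser):
#   |9122998|2013-01-01 10:00:00|PRIME1|AC|...
# i.e. fields[2] is the timestamp, fields[3] the problem code, fields[4] the verdict.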
class SpojUser(models.Model):
user = models.OneToOneField(User)
spoj_handle = models.CharField(max_length=50)
points = models.FloatField(default=0)
rank = models.IntegerField(default=10000000)
problems_solved = models.IntegerField(default=0)
notify_via_email = models.BooleanField(default=True)
last_notified = models.DateTimeField(null=True, blank=True)
FREQUENCY_CHOICES = (
(1, 'Daily'),
(2, 'Once in 2 days'),
(3, 'Once in 3 days'),
(7, 'Weekly'),
(15, 'Fortnightly'),
(30, 'Monthly')
)
frequency = models.IntegerField(default=2,
verbose_name='Problem Suggestion Frequency',
choices=FREQUENCY_CHOICES)
def __unicode__(self):
return '%s (%s)' % (self.spoj_handle, self.user.email)
    def ppp(self):
        # average points per problem, truncated to a short string for display
        if self.problems_solved == 0:
            return 'NA'
        return str((self.points * 1.0) / self.problems_solved)[0:6]
def fetch_spoj_data(self):
if not self.spoj_handle:
return
response = requests.get(SPOJ_ENDPOINT % (self.spoj_handle))
for line in response.text.split('\n'):
if line and line[0] == '|':
fields = line.split('|')
if fields[4].strip() == 'AC':
problem, created = SpojProblem.objects.get_or_create(
problem=fields[3].strip())
dt = datetime.strptime(fields[2].strip(),
"%Y-%m-%d %H:%M:%S")
try:
Submission.objects.get(user=self.user,
problem=problem)
                    except Submission.DoesNotExist:
Submission.objects.create(user=self.user,
problem=problem, timestamp=dt)
self.fetch_spoj_stats()
def fetch_spoj_stats(self):
response = requests.get('http://www.spoj.com/users/%s/' % (
self.spoj_handle))
rank = re.search('>#\d+', response.text)
points = re.search('(.* points)', response.text)
if not rank:
return
self.rank = rank.group(0)[2:]
if not points:
return
points = points.group()
try:
self.points = float(re.search("\d+.\d+", points).group())
        except AttributeError:  # no decimal part found, fall back to integer points
self.points = float(re.search("\d+", points).group())
soup = BeautifulSoup(response.text)
stats = soup.find("table", {"class": "problems"})
for index, row in enumerate(stats.findAll('tr')):
if index == 0:
continue
cols = []
for col in row.findAll('td'):
cols.append(int(col.text))
self.problems_solved = cols[0]
self.save()
class CodeGroup(models.Model):
name = models.CharField(max_length=100)
notifications = models.IntegerField(default=2,
verbose_name='Problem Notification Frequncy')
last_notified = models.DateField(null=True, blank=True)
def __unicode__(self):
return '%s' % (self.name)
class GroupMember(models.Model):
group = models.ForeignKey(CodeGroup)
user = models.ForeignKey(User, null=True, blank=True)
user_email = models.EmailField(verbose_name='Email')
invite_accepted = models.BooleanField(default=False)
receive_emails = models.BooleanField(default=False,
verbose_name='Send email notifications')
is_owner = models.BooleanField(default=False)
class Meta:
unique_together = ('group', 'user_email',)
def __unicode__(self):
return '%s - %s' % (self.group, self.user_email)
class SpojProblem(models.Model):
problem = models.CharField(max_length=40, unique=True)
solved_by = models.IntegerField(default=0)
category = models.CharField(max_length=100, null=True, blank=True)
is_tutorial = models.BooleanField(default=False)
SOURCE_CHOICES = (
('problem_classifier', 'Problem Classifier'),
('curated', 'Curated'),
)
source = models.CharField(max_length=50, null=True, blank=True,
choices=SOURCE_CHOICES)
difficulty = models.IntegerField(default=0, null=True, blank=True)
def __unicode__(self):
return self.problem
def fetch_stats(self):
if self.is_tutorial:
return
response = requests.get('http://www.spoj.com/ranks/%s/' % (
self.problem))
soup = BeautifulSoup(response.text)
stats = soup.find("table", {"class": "problems"})
for index, row in enumerate(stats.findAll('tr')):
if index == 0:
continue
cols = []
for col in row.findAll('td'):
cols.append(int(col.text))
self.solved_by = int(cols[0])
self.save()
self.categorize_tutorial_problems()
def categorize_tutorial_problems(self):
if self.is_tutorial:
return
response = requests.get('http://www.spoj.com/problems/%s/' % (
self.problem))
if '(tutorial)' in response.text:
self.is_tutorial = True
self.save()
class Submission(models.Model):
problem = models.ForeignKey(SpojProblem)
user = models.ForeignKey(User)
timestamp = models.DateTimeField()
def __unicode__(self):
return '%s - %s' % (self.problem, self.user.email)
class ProblemSuggestion(models.Model):
user = models.ForeignKey(User)
problem = models.ForeignKey(SpojProblem)
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ('user', 'problem',)
def __unicode__(self):
        return '%s - %s' % (self.user, self.problem)
class UserSuggestion(models.Model):
group = models.ForeignKey(CodeGroup)
problem = models.ForeignKey(SpojProblem)
user = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
unique_together = ('group', 'problem',)
def __unicode__(self):
return '%s' % (self.problem)
class Discussion(models.Model):
title = models.CharField(max_length=200)
content = models.TextField(null=True, blank=True)
group = models.ForeignKey(CodeGroup)
owner = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
class Reply(models.Model):
discussion = models.ForeignKey(Discussion)
content = models.TextField()
user = models.ForeignKey(User)
timestamp = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.content[:200]
| krisys/SpojBot | src/spojbot/bot/models.py | Python | mit | 6,922 | 0.002023 |
# The web is built with HTML strings like "<i>Yay</i>" which draws Yay as
# italic text. In this example, the "i" tag makes <i> and </i> which surround
# the word "Yay". Given tag and word strings, create the HTML string with tags
# around the word, e.g. "<i>Yay</i>".
# make_tags('i', 'Yay') --> '<i>Yay</i>'
# make_tags('i', 'Hello') --> '<i>Hello</i>'
# make_tags('cite', 'Yay') --> '<cite>Yay</cite>'
def make_tags(tag, word):
    return "<{0}>{1}</{0}>".format(tag, word)
print(make_tags('i', 'Yay'))
print(make_tags('i', 'Hello'))
print(make_tags('cite', 'Yay'))
| RCoon/CodingBat | Python/String_1/make_tags.py | Python | mit | 576 | 0.005208 |
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trainer for the chicago_taxi demo."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import tensorflow as tf
import tensorflow_model_analysis as tfma
import tensorflow_transform as tft
from trainer import model
from trainer import taxi
SERVING_MODEL_DIR = 'serving_model_dir'
EVAL_MODEL_DIR = 'eval_model_dir'
TRAIN_BATCH_SIZE = 40
EVAL_BATCH_SIZE = 40
# Number of nodes in the first layer of the DNN
FIRST_DNN_LAYER_SIZE = 100
NUM_DNN_LAYERS = 4
DNN_DECAY_FACTOR = 0.7
def train_and_maybe_evaluate(hparams):
"""Run the training and evaluate using the high level API.
Args:
hparams: Holds hyperparameters used to train the model as name/value pairs.
Returns:
The estimator that was used for training (and maybe eval)
"""
schema = taxi.read_schema(hparams.schema_file)
tf_transform_output = tft.TFTransformOutput(hparams.tf_transform_dir)
train_input = lambda: model.input_fn(
hparams.train_files,
tf_transform_output,
batch_size=TRAIN_BATCH_SIZE
)
eval_input = lambda: model.input_fn(
hparams.eval_files,
tf_transform_output,
batch_size=EVAL_BATCH_SIZE
)
train_spec = tf.estimator.TrainSpec(
train_input, max_steps=hparams.train_steps)
serving_receiver_fn = lambda: model.example_serving_receiver_fn(
tf_transform_output, schema)
exporter = tf.estimator.FinalExporter('chicago-taxi', serving_receiver_fn)
eval_spec = tf.estimator.EvalSpec(
eval_input,
steps=hparams.eval_steps,
exporters=[exporter],
name='chicago-taxi-eval')
run_config = tf.estimator.RunConfig(
save_checkpoints_steps=999, keep_checkpoint_max=1)
serving_model_dir = os.path.join(hparams.output_dir, SERVING_MODEL_DIR)
run_config = run_config.replace(model_dir=serving_model_dir)
estimator = model.build_estimator(
tf_transform_output,
      # Construct layer sizes with exponential decay
hidden_units=[
max(2, int(FIRST_DNN_LAYER_SIZE * DNN_DECAY_FACTOR**i))
for i in range(NUM_DNN_LAYERS)
],
config=run_config)
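  # With the defaults above (FIRST_DNN_LAYER_SIZE=100, DNN_DECAY_FACTOR=0.7,
  # NUM_DNN_LAYERS=4) the hidden layers work out to [100, 70, 48, 34]
  # (after float rounding and the max(2, ...) floor).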
tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
return estimator
def run_experiment(hparams):
"""Train the model then export it for tf.model_analysis evaluation.
Args:
hparams: Holds hyperparameters used to train the model as name/value pairs.
"""
estimator = train_and_maybe_evaluate(hparams)
schema = taxi.read_schema(hparams.schema_file)
tf_transform_output = tft.TFTransformOutput(hparams.tf_transform_dir)
# Save a model for tfma eval
eval_model_dir = os.path.join(hparams.output_dir, EVAL_MODEL_DIR)
receiver_fn = lambda: model.eval_input_receiver_fn(
tf_transform_output, schema)
tfma.export.export_eval_savedmodel(
estimator=estimator,
export_dir_base=eval_model_dir,
eval_input_receiver_fn=receiver_fn)
def main():
parser = argparse.ArgumentParser()
# Input Arguments
parser.add_argument(
'--train-files',
help='GCS or local paths to training data',
nargs='+',
required=True)
parser.add_argument(
'--tf-transform-dir',
help='Tf-transform directory with model from preprocessing step',
required=True)
parser.add_argument(
'--output-dir',
help="""\
      Directory under which the serving model (under /serving_model_dir)\
      and the tf-model-analysis model (under /eval_model_dir) will be written\
""",
required=True)
parser.add_argument(
'--eval-files',
help='GCS or local paths to evaluation data',
nargs='+',
required=True)
# Training arguments
parser.add_argument(
'--job-dir',
help='GCS location to write checkpoints and export models',
required=True)
# Argument to turn on all logging
parser.add_argument(
'--verbosity',
choices=['DEBUG', 'ERROR', 'FATAL', 'INFO', 'WARN'],
default='INFO',
)
# Experiment arguments
parser.add_argument(
'--train-steps',
help='Count of steps to run the training job for',
required=True,
type=int)
parser.add_argument(
'--eval-steps',
help='Number of steps to run evalution for at each checkpoint',
default=100,
type=int)
parser.add_argument(
'--schema-file',
help='File holding the schema for the input data')
args = parser.parse_args()
# Set python level verbosity
tf.logging.set_verbosity(args.verbosity)
# Set C++ Graph Execution level verbosity
os.environ['TF_CPP_MIN_LOG_LEVEL'] = str(
tf.logging.__dict__[args.verbosity] / 10)
# Run the training job
hparams = tf.contrib.training.HParams(**args.__dict__)
run_experiment(hparams)
if __name__ == '__main__':
main()
| markflyhigh/incubator-beam | sdks/python/apache_beam/testing/benchmarks/chicago_taxi/trainer/task.py | Python | apache-2.0 | 5,417 | 0.009599 |
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
All the little functions that make life nicer in the Traits package.
.. moduleauthor:: Mihai Andrei <[email protected]>
.. moduleauthor:: Lia Domide <[email protected]>
.. moduleauthor:: marmaduke <[email protected]>
"""
import numpy
import collections
import inspect
from tvb.basic.profile import TvbProfile
# returns true if key is, by convention, public
ispublic = lambda key: key[0] != '_'
def str_class_name(thing, short_form=False):
"""
A helper function that tries to generate an informative name for its
argument: when passed a class, return its name, when passed an object
return a string representation of that value.
"""
# if thing is a class, it has attribute __name__
if hasattr(thing, '__name__'):
cls = thing
if short_form:
return cls.__name__
return cls.__module__ + '.' + cls.__name__
else:
# otherwise, it's an object and we return its __str__
return str(thing)
def get(obj, key, default=None):
"""
get() is a general function allowing us to ignore whether we are
getting from a dictionary or object. If obj is a dictionary, we
return the value corresponding to key, otherwise we return the
attribute on obj corresponding to key. In both cases, if key
does not exist, default is returned.
"""
if type(obj) is dict:
return obj.get(key, default)
else:
return getattr(obj, key) if hasattr(obj, key) else default
def log_debug_array(log, array, array_name, owner=""):
"""
Simple access to debugging info on an array.
"""
if TvbProfile.current.TRAITS_CONFIGURATION.use_storage:
return
    # Hide these logs in web-mode with storage, because we get multiple storage exceptions
if owner != "":
name = ".".join((owner, array_name))
else:
name = array_name
if array is not None and hasattr(array, 'shape'):
shape = str(array.shape)
dtype = str(array.dtype)
has_nan = str(numpy.isnan(array).any())
array_max = str(array.max())
array_min = str(array.min())
log.debug("%s shape: %s" % (name, shape))
log.debug("%s dtype: %s" % (name, dtype))
log.debug("%s has NaN: %s" % (name, has_nan))
log.debug("%s maximum: %s" % (name, array_max))
log.debug("%s minimum: %s" % (name, array_min))
else:
log.debug("%s is None or not Array" % name)
Args = collections.namedtuple('Args', 'pos kwd')
class TypeRegister(list):
"""
TypeRegister is a smart list that can be queried to obtain selections of the
classes inheriting from Traits classes.
"""
def subclasses(self, obj, avoid_subclasses=False):
"""
The subclasses method takes a class (or given instance object, will use
the class of the instance), and returns a list of all options known to
this TypeRegister that are direct subclasses of the class or have the
class in their base class list.
:param obj: Class or instance
:param avoid_subclasses: When specified, subclasses are not retrieved, only current class.
"""
cls = obj if inspect.isclass(obj) else obj.__class__
if avoid_subclasses:
return [cls]
if hasattr(cls, '_base_classes'):
bases = cls._base_classes
else:
bases = []
        subclasses = [opt for opt in self if ((issubclass(opt, cls) or cls in opt.__bases__)
                                              and not inspect.isabstract(opt) and opt.__name__ not in bases)]
        return subclasses
def multiline_math_directives_to_matjax(doc):
"""
Looks for multi-line sphinx math directives in the given rst string
    It converts them into HTML text that will be interpreted by MathJax
The parsing is simplistic, not a rst parser.
Wraps .. math :: body in \[\begin{split}\end{split}\]
"""
# doc = text | math
BEGIN = r'\[\begin{split}'
END = r'\end{split}\]'
in_math = False # 2 state parser
out_lines = []
indent = ''
for line in doc.splitlines():
if not in_math:
# math = indent directive math_body
indent, sep, _ = line.partition('.. math::')
if sep:
out_lines.append(BEGIN)
in_math = True
else:
out_lines.append(line)
else:
# math body is at least 1 space more indented than the directive, but we tolerate empty lines
if line.startswith(indent + ' ') or line.strip() == '':
out_lines.append(line)
else:
# this line is not properly indented, math block is over
out_lines.append(END)
out_lines.append(line)
in_math = False
if in_math:
# close math tag
out_lines.append(END)
    return '\n'.join(out_lines)
| echohenry2006/tvb-library | tvb/basic/traits/util.py | Python | gpl-2.0 | 6,354 | 0.003777 |
from loaderio.resources.client import Client
class Servers(Client):
"""
"""
def __init__(self, api_key):
Client.__init__(self, api_key)
def list(self):
        return self.request('GET', 'servers')
| kenyaapps/loaderio | loaderio/resources/servers.py | Python | mit | 207 | 0.043478 |
import re
import sjconfparts.exceptions
class Error(sjconfparts.exceptions.Error):
pass
class ConversionError(Error):
pass
class ConversionList:
"""Custom list implementation, linked to the related Conf.
Each modification of the list will auto-update the string representation
of the list directly in the Conf object, via a call to
self.conversion_method().
Nowadays this is considered ugly (maybe it wasn't back in 2008 with Python 2.5?),
but no one wants nor has time to redevelop a big part of SJConf to get rid of this.
(aka don't blame the current dev who just wants to port this mess to Python3 :-p)
Starting from Python3/new style classes, all used special methods must be
explicitly redefined:
https://docs.python.org/3/reference/datamodel.html#special-lookup
"""
    def __add__(self, other):
        # concatenation returns a new list and does not mutate self, so no
        # re-synchronisation is needed; return the result instead of dropping it
        return self.innerList.__add__(other)
def __init__(self, conversion_method, list_object=None):
self.conversion_method = conversion_method
if list_object == None:
list_object = []
self.innerList = list_object
def __contains__(self, item):
return self.innerList.__contains__(item)
def __delitem__(self, key):
self.innerList.__delitem__(key)
self.conversion_method()
    def __getitem__(self, key):
        return self.innerList.__getitem__(key)
    def __iadd__(self, other):
        self.innerList.__iadd__(other)
        self.conversion_method()
        return self
    def __imul__(self, other):
        self.innerList.__imul__(other)
        self.conversion_method()
        return self
def __iter__(self):
return self.innerList.__iter__()
def __len__(self):
return self.innerList.__len__()
    def __mul__(self, other):
        # multiplication returns a new list and does not mutate self
        return self.innerList.__mul__(other)
    def __reversed__(self):
        # reversed() returns an iterator and does not mutate the list
        return self.innerList.__reversed__()
    def __rmul__(self, other):
        return self.innerList.__rmul__(other)
def __setitem__(self, key, value):
self.innerList.__setitem__(key, value)
self.conversion_method()
def __str__(self):
return self.innerList.__str__()
def __getattr__(self, name):
list_method = getattr(self.innerList, name)
def method(*args, **kw):
result = list_method(*args, **kw)
if name in (
"append",
"extend",
"insert",
"pop",
"remove",
"reverse",
"sort",
):
self.conversion_method()
return result
return method
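# Minimal usage sketch (hypothetical): every mutation triggers the callback,
# which is how the Conf keeps its string representation in sync.
#
#   conf = {"servers": "a, b"}
#   synced = ConversionList(lambda: conf.update(servers=", ".join(synced)), ["a", "b"])
#   synced.append("c")  # conf["servers"] is now "a, b, c"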
class Type:
    class ConversionBadTypeError(ConversionError):
        def __init__(self, type_source, type_dest):
            self.msg = (
                "Invalid conversion from type %s to type %s, can only convert from str or to str"
                % (type_source, type_dest)
            )
@classmethod
def convert(cls, type_source, type_dest, dict_source, dict_dest, key):
if type_source == "str":
type_class_name = type_dest.capitalize()
elif type_dest == "str":
type_class_name = type_source.capitalize()
else:
raise Type.ConversionBadTypeError(type_source, type_dest)
type_class = getattr(cls, type_class_name)
return getattr(type_class, type_source + "_to_" + type_dest)(
dict_source, dict_dest, key
)
@classmethod
def convert_safe(cls, type_source, type_dest, dict_source, dict_dest, key):
if type_source == "str":
type_class_name = type_dest.capitalize()
elif type_dest == "str":
type_class_name = type_source.capitalize()
else:
raise Type.ConversionBadTypeError(type_source, type_dest)
type_class = getattr(cls, type_class_name)
if hasattr(type_class, type_source + "_to_" + type_dest + "_safe"):
return getattr(type_class, type_source + "_to_" + type_dest + "_safe")(
dict_source, dict_dest, key
)
else:
return getattr(type_class, type_source + "_to_" + type_dest)(
dict_source, dict_dest, key
)
@classmethod
def convert_key(cls, key, type):
return cls._convert_method("key", key, type)
@classmethod
def convert_value(cls, value, type, dict_str, dict_type, key):
return cls._convert_method("value", value, type, dict_str, dict_type, key)
@classmethod
def convert_key_for_search(cls, key, type):
return cls._convert_method("key_for_search", key, type)
@classmethod
def _convert_method(cls, method, value, type, *args):
type_class = getattr(cls, type.capitalize())
if not hasattr(type_class, method):
converted_value = value
else:
converted_value = getattr(type_class, method)(value, *args)
return converted_value
class List:
@classmethod
def value(cls, value, dict_str, dict_type, key):
def conversion_method():
Type.List.list_to_str(dict_type, dict_str, key)
return ConversionList(conversion_method, value)
@classmethod
def str_to_list(cls, dict_source, dict_dest, key):
def conversion_method():
Type.List.list_to_str(dict_dest, dict_source, key)
str_object = dict_source[key]
li = list(map(str.strip, str_object.split(",")))
try:
li.remove("")
except ValueError:
pass
dict_dest[key] = ConversionList(conversion_method, li)
return dict_dest
@classmethod
def str_to_list_safe(cls, dict_source, dict_dest, key):
str_object = dict_source[key]
list_object = list(map(str.strip, str_object.split(",")))
try:
list_object.remove("")
except ValueError:
pass
dict_dest[key] = list_object
return dict_dest
@classmethod
def list_to_str(cls, dict_source, dict_dest, key):
list_object = dict_source[key]
str_object = ", ".join(list_object)
dict_dest[key] = str_object
return dict_dest
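        # Round-trip sketch (illustrative):
        #   Type.List.str_to_list({"k": "a, b"}, {}, "k")     -> {"k": ["a", "b"]} (as a ConversionList)
        #   Type.List.list_to_str({"k": ["a", "b"]}, {}, "k") -> {"k": "a, b"}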
class Bool:
TRUE_VALUES = ("yes", "on", "true", "enabled", "enable")
FALSE_VALUES = ("no", "off", "false", "disabled", "disable")
class StrToBoolError(ConversionError):
def __init__(self, str_object):
self.msg = (
'Bad value "%s" for str to bool conversion, expected a value in %s'
% (str_object, str(Type.Bool.TRUE_VALUES + Type.Bool.FALSE_VALUES))
)
class BoolToStrError(ConversionError):
def __init__(self, bool_object):
self.msg = (
'Bad value "%s" for bool to str conversion, expected a boolean'
% (bool_object)
)
@classmethod
def str_to_bool(cls, dict_source, dict_dest, key):
str_object = dict_source[key]
if str_object.lower() in Type.Bool.TRUE_VALUES:
bool_object = True
elif str_object.lower() in Type.Bool.FALSE_VALUES:
bool_object = False
else:
raise Type.Bool.StrToBoolError(str_object)
dict_dest[key] = bool_object
return dict_dest
@classmethod
def bool_to_str(cls, dict_source, dict_dest, key):
bool_object = dict_source[key]
if bool_object == True:
str_object = "yes"
elif bool_object == False:
str_object = "no"
else:
raise Type.Bool.BoolToStrError(bool_object)
dict_dest[key] = str_object
return dict_dest
class Size:
class StrToSizeError(ConversionError):
def __init__(self, str_object):
self.msg = (
'Bad value "%s" for str to size conversion, expected a value like, e.g. 10M'
% (str_object)
)
class SizeToStrError(ConversionError):
def __init__(self, size_object):
self.msg = (
'Bad value "%s" for size to str conversion, expected an integer'
% (size_object)
)
@classmethod
def str_to_size(cls, dict_source, dict_dest, key):
str_object = dict_source[key]
suffixes = ["T", "G", "M", "k"]
            match_result = re.compile(r"^(\d+)([%s])?$" % ("".join(suffixes))).match(
str_object
)
if match_result == None:
raise Type.Size.StrToSizeError(str_object)
size, suffix = match_result.groups("")
size_object = int(size)
while len(suffixes) > 0:
if suffix in suffixes:
size_object *= 1024
suffixes.pop()
dict_dest[key] = size_object
return dict_dest
        @classmethod
        def size_to_str(cls, dict_source, dict_dest, key):
            try:
                size_object = int(dict_source[key])
            except ValueError:
                # report the raw value: size_object is not bound when int() fails
                raise Type.Size.SizeToStrError(dict_source[key])
            suffix = ""  # values that never exceed 1024 keep no suffix
            for suffix_to_test in ("k", "M", "G", "T"):
                if size_object > 1024:
                    suffix = suffix_to_test
                    size_object //= 1024
            str_object = str(size_object) + suffix
            dict_dest[key] = str_object
            return dict_dest
class Sequence:
@classmethod
def key(cls, key):
            match_results = re.compile(r"^(.*)-\d+$").match(key)
if match_results:
key = match_results.group(1)
return key
@classmethod
def key_for_search(cls, key):
if not hasattr(key, "search"):
key = cls.key(key)
            key = re.compile(r"^%s(-\d+)?$" % (key))
return key
@classmethod
def value(cls, value, dict_str, dict_type, key):
def conversion_method():
Type.Sequence.sequence_to_str(dict_type, dict_str, key)
return ConversionList(conversion_method, value)
@classmethod
def key_to_index(cls, key, key_to_convert):
index = key_to_convert[len(key) + 1 :]
if index == "":
index = -1
else:
index = int(index)
return index
@classmethod
def str_to_sequence(cls, dict_source, dict_dest, key):
def conversion_method():
Type.Sequence.sequence_to_str(dict_dest, dict_source, key)
str_object = []
key = cls.key(key)
            regexp = re.compile(r"^%s-\d+$" % (key))
for (key_to_test, value) in dict_source.items():
if key_to_test == key or regexp.match(key_to_test):
str_object.append((key_to_test, value))
str_object.sort(key=lambda str_object: cls.key_to_index(key, str_object[0]))
sequence_object = ConversionList(
conversion_method, [value for (str_key, value) in str_object]
)
dict_dest[key] = sequence_object
return dict_dest
@classmethod
def str_to_sequence_safe(cls, dict_source, dict_dest, key):
str_object = []
key = cls.key(key)
            regexp = re.compile(r"^%s-\d+$" % (key))
for (key_to_test, value) in dict_source.items():
if key_to_test == key or regexp.match(key_to_test):
str_object.append((key_to_test, value))
str_object.sort(key=lambda str_object: cls.key_to_index(key, str_object[0]))
dict_dest[key] = [value for (str_key, value) in str_object]
return dict_dest
@classmethod
def assign_elts(cls, elts, assignments_old, indices_unassigned):
def _assign_unassigned(
indices, elts_unassigned, indices_unassigned, index_prev, index
):
indices_available = [
index_unassigned
for index_unassigned in indices_unassigned
if index_unassigned > index_prev
and (index_unassigned < index or index < -1)
]
for index_available in indices_available:
indices_unassigned.remove(index_available)
while len(indices_available) > len(elts_unassigned) - (
index >= -1 and 1 or 0
):
indices_available.pop()
indices_available.append(index)
indices_to_assign = []
for index_available in indices_available:
while len(indices_to_assign) < len(elts_unassigned) - (
index_available >= -1 and 1 or 0
):
if index_prev < index_available - 1 or index_available < -1:
index_prev += 1
indices_to_assign.append(index_prev)
if index_available >= -1:
indices_to_assign.append(index_available)
index_prev = index_available
while len(elts_unassigned) > 0:
elts_unassigned.pop(0)
index_prev = indices_to_assign.pop(0)
indices.append(index_prev)
return index_prev
elts_unassigned = []
indices = []
index_prev = 0
for elt in elts:
elts_unassigned.append(elt)
if elt in assignments_old:
index = assignments_old[elt]
if index > index_prev and (
len(elts_unassigned) == 1
or len(elts_unassigned) <= index - index_prev
):
index_prev = _assign_unassigned(
indices,
elts_unassigned,
indices_unassigned,
index_prev,
index,
)
index_prev = _assign_unassigned(
indices, elts_unassigned, indices_unassigned, index_prev, -2
)
return indices
@classmethod
def sequence_to_str(cls, dict_source, dict_dest, key):
key = cls.key(key)
sequence_object = [elt for elt in list(dict_source[key]) if elt != ""]
            regexp = re.compile(r"^%s-\d+$" % (key))
str_keys = [
key_to_test for key_to_test in dict_dest if regexp.match(key_to_test)
]
keys_unassigned = [
str_key for str_key in str_keys if dict_dest[str_key] == ""
]
str_keys = [
str_key for str_key in str_keys if str_key not in keys_unassigned
]
assignments_old = dict(
[
(dict_dest[str_key], cls.key_to_index(key, str_key))
for str_key in sorted(
str_keys,
key=lambda key_to_convert: cls.key_to_index(
key, key_to_convert
),
)
]
)
indices = cls.assign_elts(
sequence_object,
assignments_old,
[
cls.key_to_index(key, key_to_convert)
for key_to_convert in keys_unassigned
],
)
for str_key in str_keys:
del dict_dest[str_key]
while len(sequence_object) > 0:
elt = sequence_object.pop(0)
index = indices.pop(0)
dict_dest[key + "-" + str(index)] = elt
return dict_dest
| SmartJog/sjconf | sjconfparts/type.py | Python | lgpl-2.1 | 16,215 | 0.001788 |
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: Philip J Grabner <[email protected]>
# date: 2013/07/31
# copy: (C) Copyright 2013 Cadit Health Inc., All Rights Reserved.
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
class Modifier(object):
#----------------------------------------------------------------------------
def modify(self, mailfrom, recipients, data):
'''
Modifies the prepared email for sending.
:Parameters:
mailfrom : str
the SMTP-level `MAILFROM` command argument.
recipients : { list, tuple }
an iterable of the SMTP-level `RCPTTO` command arguments.
data : { str, email.MIMEMessage }
represents the SMTP-level `DATA` command argument, and can
either be a subclass of `email.MIMEMessage` or the raw SMTP data
(as generated by a call to `email.MIMEMessage.as_string()`).
:Returns:
tuple
A three-element tuple with the adjusted `mailfrom`, `recipients`
and `data` values.
'''
raise NotImplementedError()
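# A concrete subclass only needs to implement `modify`; a minimal sketch
# (hypothetical, not part of genemail itself):
#
#   class BccModifier(Modifier):
#     def __init__(self, bcc):
#       self.bcc = bcc
#     def modify(self, mailfrom, recipients, data):
#       return (mailfrom, list(recipients) + [self.bcc], data)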
#------------------------------------------------------------------------------
class ChainingModifier(Modifier):
#----------------------------------------------------------------------------
def __init__(self, modifiers=[]):
self.modifiers = modifiers
#----------------------------------------------------------------------------
def addModifier(self, modifier):
self.modifiers.append(modifier)
return self
#----------------------------------------------------------------------------
def modify(self, mailfrom, recipients, data):
for mod in self.modifiers:
mailfrom, recipients, data = mod.modify(mailfrom, recipients, data)
return (mailfrom, recipients, data)
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
| cadithealth/genemail | genemail/modifier/base.py | Python | mit | 2,085 | 0.009592 |
""" ProxyManager is the implementation of the ProxyManagement service in the DISET framework
.. literalinclude:: ../ConfigTemplate.cfg
:start-after: ##BEGIN ProxyManager:
:end-before: ##END
:dedent: 2
:caption: ProxyManager options
"""
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
from DIRAC.Core.Security import Properties
from DIRAC.Core.Utilities.ThreadScheduler import gThreadScheduler
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
DEFAULT_MAIL_FROM = "[email protected]"
class ProxyManagerHandler(RequestHandler):
__maxExtraLifeFactor = 1.5
__proxyDB = None
@classmethod
def initializeHandler(cls, serviceInfoDict):
useMyProxy = cls.srv_getCSOption("UseMyProxy", False)
mailFrom = getServiceOption(serviceInfoDict, "MailFrom", DEFAULT_MAIL_FROM)
try:
result = ObjectLoader().loadObject("FrameworkSystem.DB.ProxyDB")
if not result["OK"]:
gLogger.error("Failed to load ProxyDB class: %s" % result["Message"])
return result
dbClass = result["Value"]
cls.__proxyDB = dbClass(useMyProxy=useMyProxy, mailFrom=mailFrom)
except RuntimeError as excp:
return S_ERROR("Can't connect to ProxyDB: %s" % excp)
gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredTokens, elapsedTime=900)
gThreadScheduler.addPeriodicTask(900, cls.__proxyDB.purgeExpiredRequests, elapsedTime=900)
gThreadScheduler.addPeriodicTask(21600, cls.__proxyDB.purgeLogs)
gThreadScheduler.addPeriodicTask(3600, cls.__proxyDB.purgeExpiredProxies)
if useMyProxy:
gLogger.info("MyProxy: %s\n MyProxy Server: %s" % (useMyProxy, cls.__proxyDB.getMyProxyServer()))
return S_OK()
def __generateUserProxiesInfo(self):
"""Generate information dict about user proxies
:return: dict
"""
proxiesInfo = {}
credDict = self.getRemoteCredentials()
result = Registry.getDNForUsername(credDict["username"])
if not result["OK"]:
return result
selDict = {"UserDN": result["Value"]}
result = self.__proxyDB.getProxiesContent(selDict, {})
if not result["OK"]:
return result
contents = result["Value"]
userDNIndex = contents["ParameterNames"].index("UserDN")
userGroupIndex = contents["ParameterNames"].index("UserGroup")
expirationIndex = contents["ParameterNames"].index("ExpirationTime")
for record in contents["Records"]:
userDN = record[userDNIndex]
if userDN not in proxiesInfo:
proxiesInfo[userDN] = {}
userGroup = record[userGroupIndex]
proxiesInfo[userDN][userGroup] = record[expirationIndex]
return proxiesInfo
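    # The resulting structure maps each DN to its groups and expiration times,
    # e.g. (illustrative values):
    #   {"/DC=org/CN=jane": {"dirac_user": <ExpirationTime>, "dirac_admin": <ExpirationTime>}}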
def __addKnownUserProxiesInfo(self, retDict):
"""Given a S_OK/S_ERR add a proxies entry with info of all the proxies a user has uploaded
:return: S_OK(dict)/S_ERROR()
"""
retDict["proxies"] = self.__generateUserProxiesInfo()
return retDict
auth_getUserProxiesInfo = ["authenticated"]
types_getUserProxiesInfo = []
def export_getUserProxiesInfo(self):
"""Get the info about the user proxies in the system
:return: S_OK(dict)
"""
return S_OK(self.__generateUserProxiesInfo())
# WARN: Since v7r1 requestDelegationUpload method use only first argument!
# WARN: Second argument for compatibility with older versions
types_requestDelegationUpload = [int]
def export_requestDelegationUpload(self, requestedUploadTime, diracGroup=None):
"""Request a delegation. Send a delegation request to client
        :param int requestedUploadTime: requested lifetime in seconds
        :return: S_OK(dict)/S_ERROR() -- dict contains the request id and the delegation request as a PEM string
"""
if diracGroup:
self.log.warn("Since v7r1 requestDelegationUpload method use only first argument!")
credDict = self.getRemoteCredentials()
user = "%s:%s" % (credDict["username"], credDict["group"])
result = self.__proxyDB.generateDelegationRequest(credDict["x509Chain"], credDict["DN"])
if result["OK"]:
gLogger.info("Upload request by %s given id %s" % (user, result["Value"]["id"]))
else:
gLogger.error("Upload request failed", "by %s : %s" % (user, result["Message"]))
return result
types_completeDelegationUpload = [int, str]
def export_completeDelegationUpload(self, requestId, pemChain):
"""Upload result of delegation
:param int requestId: identity number
:param str pemChain: certificate as string
        :return: S_OK(dict)/S_ERROR() -- dict contains the proxies info
"""
credDict = self.getRemoteCredentials()
userId = "%s:%s" % (credDict["username"], credDict["group"])
retVal = self.__proxyDB.completeDelegation(requestId, credDict["DN"], pemChain)
if not retVal["OK"]:
gLogger.error("Upload proxy failed", "id: %s user: %s message: %s" % (requestId, userId, retVal["Message"]))
return self.__addKnownUserProxiesInfo(retVal)
gLogger.info("Upload %s by %s completed" % (requestId, userId))
return self.__addKnownUserProxiesInfo(S_OK())
types_getRegisteredUsers = []
def export_getRegisteredUsers(self, validSecondsRequired=0):
"""Get the list of users who have a valid proxy in the system
:param int validSecondsRequired: required seconds the proxy is valid for
:return: S_OK(list)/S_ERROR() -- list contain dicts with user name, DN, group
expiration time, persistent flag
"""
credDict = self.getRemoteCredentials()
if Properties.PROXY_MANAGEMENT not in credDict["properties"]:
return self.__proxyDB.getUsers(validSecondsRequired, userMask=credDict["username"])
return self.__proxyDB.getUsers(validSecondsRequired)
def __checkProperties(self, requestedUserDN, requestedUserGroup):
"""Check the properties and return if they can only download limited proxies if authorized
:param str requestedUserDN: user DN
:param str requestedUserGroup: DIRAC group
:return: S_OK(boolean)/S_ERROR()
"""
credDict = self.getRemoteCredentials()
if Properties.FULL_DELEGATION in credDict["properties"]:
return S_OK(False)
if Properties.LIMITED_DELEGATION in credDict["properties"]:
return S_OK(True)
if Properties.PRIVATE_LIMITED_DELEGATION in credDict["properties"]:
if credDict["DN"] != requestedUserDN:
return S_ERROR("You are not allowed to download any proxy")
if Properties.PRIVATE_LIMITED_DELEGATION not in Registry.getPropertiesForGroup(requestedUserGroup):
return S_ERROR("You can't download proxies for that group")
return S_OK(True)
# Not authorized!
return S_ERROR("You can't get proxies!")
types_getProxy = [str, str, str, int]
def export_getProxy(self, userDN, userGroup, requestPem, requiredLifetime):
"""Get a proxy for a userDN/userGroup
:param requestPem: PEM encoded request object for delegation
:param requiredLifetime: requested proxy lifetime in seconds
* Properties:
* FullDelegation <- permits full delegation of proxies
* LimitedDelegation <- permits downloading only limited proxies
* PrivateLimitedDelegation <- permits downloading only limited proxies for one self
"""
credDict = self.getRemoteCredentials()
result = self.__checkProperties(userDN, userGroup)
if not result["OK"]:
return result
forceLimited = result["Value"]
self.__proxyDB.logAction("download proxy", credDict["DN"], credDict["group"], userDN, userGroup)
return self.__getProxy(userDN, userGroup, requestPem, requiredLifetime, forceLimited)
def __getProxy(self, userDN, userGroup, requestPem, requiredLifetime, forceLimited):
"""Internal to get a proxy
:param str userDN: user DN
:param str userGroup: DIRAC group
:param str requestPem: dump of request certificate
:param int requiredLifetime: requested proxy lifetime in seconds
:param boolean forceLimited: force generation of a limited proxy
:return: S_OK(str)/S_ERROR()
"""
retVal = self.__proxyDB.getProxy(userDN, userGroup, requiredLifeTime=requiredLifetime)
if not retVal["OK"]:
return retVal
chain, secsLeft = retVal["Value"]
# If possible we return a proxy up to 1.5 times longer than requested, capped by the time left on the stored proxy
requiredLifetime = int(min(secsLeft, requiredLifetime * self.__maxExtraLifeFactor))
retVal = chain.generateChainFromRequestString(
requestPem, lifetime=requiredLifetime, requireLimited=forceLimited
)
if not retVal["OK"]:
return retVal
return S_OK(retVal["Value"])
types_getVOMSProxy = [str, str, str, int, [str, type(None), bool]]
def export_getVOMSProxy(self, userDN, userGroup, requestPem, requiredLifetime, vomsAttribute=None):
"""Get a proxy for a userDN/userGroup
:param requestPem: PEM encoded request object for delegation
:param requiredLifetime: requested proxy lifetime in seconds
:param vomsAttribute: VOMS attr to add to the proxy
* Properties :
* FullDelegation <- permits full delegation of proxies
* LimitedDelegation <- permits downloading only limited proxies
* PrivateLimitedDelegation <- permits downloading only limited proxies for one self
"""
credDict = self.getRemoteCredentials()
result = self.__checkProperties(userDN, userGroup)
if not result["OK"]:
return result
forceLimited = result["Value"]
self.__proxyDB.logAction("download voms proxy", credDict["DN"], credDict["group"], userDN, userGroup)
return self.__getVOMSProxy(userDN, userGroup, requestPem, requiredLifetime, vomsAttribute, forceLimited)
def __getVOMSProxy(self, userDN, userGroup, requestPem, requiredLifetime, vomsAttribute, forceLimited):
retVal = self.__proxyDB.getVOMSProxy(
userDN, userGroup, requiredLifeTime=requiredLifetime, requestedVOMSAttr=vomsAttribute
)
if not retVal["OK"]:
return retVal
chain, secsLeft = retVal["Value"]
# If possible we return a proxy up to 1.5 times longer than requested, capped by the time left on the stored proxy
requiredLifetime = int(min(secsLeft, requiredLifetime * self.__maxExtraLifeFactor))
return chain.generateChainFromRequestString(requestPem, lifetime=requiredLifetime, requireLimited=forceLimited)
types_setPersistency = [str, str, bool]
def export_setPersistency(self, userDN, userGroup, persistentFlag):
"""Set the persistency for a given dn/group
:param str userDN: user DN
:param str userGroup: DIRAC group
:param boolean persistentFlag: if proxy persistent
:return: S_OK()/S_ERROR()
"""
retVal = self.__proxyDB.setPersistencyFlag(userDN, userGroup, persistentFlag)
if not retVal["OK"]:
return retVal
credDict = self.getRemoteCredentials()
self.__proxyDB.logAction(
"set persistency to %s" % bool(persistentFlag), credDict["DN"], credDict["group"], userDN, userGroup
)
return S_OK()
types_deleteProxyBundle = [(list, tuple)]
def export_deleteProxyBundle(self, idList):
"""delete a list of id's
:param list,tuple idList: list of identity numbers
:return: S_OK(int)/S_ERROR()
"""
errorInDelete = []
deleted = 0
for _id in idList:
if len(_id) != 2:
errorInDelete.append("%s doesn't have two fields" % str(_id))
continue
retVal = self.export_deleteProxy(_id[0], _id[1])
if not retVal["OK"]:
errorInDelete.append("%s : %s" % (str(_id), retVal["Message"]))
else:
deleted += 1
if errorInDelete:
return S_ERROR("Could not delete some proxies: %s" % ",".join(errorInDelete))
return S_OK(deleted)
types_deleteProxy = [str, str]  # userDN, userGroup
def export_deleteProxy(self, userDN, userGroup):
"""Delete a proxy from the DB
:param str userDN: user DN
:param str userGroup: DIRAC group
:return: S_OK()/S_ERROR()
"""
credDict = self.getRemoteCredentials()
if Properties.PROXY_MANAGEMENT not in credDict["properties"]:
if userDN != credDict["DN"]:
return S_ERROR("You aren't allowed!")
retVal = self.__proxyDB.deleteProxy(userDN, userGroup)
if not retVal["OK"]:
return retVal
self.__proxyDB.logAction("delete proxy", credDict["DN"], credDict["group"], userDN, userGroup)
return S_OK()
types_getContents = [dict, (list, tuple), int, int]
def export_getContents(self, selDict, sortDict, start, limit):
"""Retrieve the contents of the DB
:param dict selDict: selection fields
:param list,tuple sortDict: sorting fields
:param int start: search limit start
:param int limit: search limit amount
:return: S_OK(dict)/S_ERROR() -- dict contain fields, record list, total records
"""
credDict = self.getRemoteCredentials()
if Properties.PROXY_MANAGEMENT not in credDict["properties"]:
selDict["UserName"] = credDict["username"]
return self.__proxyDB.getProxiesContent(selDict, sortDict, start, limit)
types_getLogContents = [dict, (list, tuple), int, int]
def export_getLogContents(self, selDict, sortDict, start, limit):
"""Retrieve the contents of the DB
:param dict selDict: selection fields
:param list,tuple sortDict: search filter
:param int start: search limit start
:param int start: search limit amount
:return: S_OK(dict)/S_ERROR() -- dict contain fields, record list, total records
"""
return self.__proxyDB.getLogsContent(selDict, sortDict, start, limit)
types_generateToken = [str, str, int]
def export_generateToken(self, requesterDN, requesterGroup, tokenUses):
"""Generate tokens for proxy retrieval
:param str requesterDN: user DN
:param str requesterGroup: DIRAC group
:param int tokenUses: number of uses
:return: S_OK(tuple)/S_ERROR() -- tuple containing the token and its number of uses
"""
credDict = self.getRemoteCredentials()
self.__proxyDB.logAction("generate tokens", credDict["DN"], credDict["group"], requesterDN, requesterGroup)
return self.__proxyDB.generateToken(requesterDN, requesterGroup, numUses=tokenUses)
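# Token flow (illustrative): generateToken() is called on behalf of a
# requester DN/group and the resulting token handed to another client,
# which can then fetch limited proxies through getProxyWithToken() /
# getVOMSProxyWithToken() below until its uses run out.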
types_getProxyWithToken = [str, str, str, int, str]
def export_getProxyWithToken(self, userDN, userGroup, requestPem, requiredLifetime, token):
"""Get a proxy for a userDN/userGroup
:param requestPem: PEM encoded request object for delegation
:param requiredLifetime: requested proxy lifetime in seconds
:param token: Valid token to get a proxy
* Properties:
* FullDelegation <- permits full delegation of proxies
* LimitedDelegation <- permits downloading only limited proxies
* PrivateLimitedDelegation <- permits downloading only limited proxies for one self
"""
credDict = self.getRemoteCredentials()
result = self.__proxyDB.useToken(token, credDict["DN"], credDict["group"])
gLogger.info("Trying to use token %s by %s:%s" % (token, credDict["DN"], credDict["group"]))
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Proxy token is invalid")
self.__proxyDB.logAction("used token", credDict["DN"], credDict["group"], userDN, userGroup)
result = self.__checkProperties(userDN, userGroup)
if not result["OK"]:
return result
self.__proxyDB.logAction("download proxy with token", credDict["DN"], credDict["group"], userDN, userGroup)
return self.__getProxy(userDN, userGroup, requestPem, requiredLifetime, True)
types_getVOMSProxyWithToken = [str, str, str, int, str, [str, type(None)]]  # token is a required str; the VOMS attribute is optional
def export_getVOMSProxyWithToken(self, userDN, userGroup, requestPem, requiredLifetime, token, vomsAttribute=None):
"""Get a proxy for a userDN/userGroup
:param requestPem: PEM encoded request object for delegation
:param requiredLifetime: requested proxy lifetime in seconds
:param vomsAttribute: VOMS attr to add to the proxy
* Properties :
* FullDelegation <- permits full delegation of proxies
* LimitedDelegation <- permits downloading only limited proxies
* PrivateLimitedDelegation <- permits downloading only limited proxies for one self
"""
credDict = self.getRemoteCredentials()
result = self.__proxyDB.useToken(token, credDict["DN"], credDict["group"])
if not result["OK"]:
return result
if not result["Value"]:
return S_ERROR("Proxy token is invalid")
self.__proxyDB.logAction("used token", credDict["DN"], credDict["group"], userDN, userGroup)
result = self.__checkProperties(userDN, userGroup)
if not result["OK"]:
return result
self.__proxyDB.logAction("download voms proxy with token", credDict["DN"], credDict["group"], userDN, userGroup)
return self.__getVOMSProxy(userDN, userGroup, requestPem, requiredLifetime, vomsAttribute, True)
| ic-hep/DIRAC | src/DIRAC/FrameworkSystem/Service/ProxyManagerHandler.py | Python | gpl-3.0 | 18,027 | 0.003162 |
# Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
def notify_init_event(agent_type, agent):
"""Notify init event for the specified agent."""
registry.publish(agent_type, events.AFTER_INIT, agent)
def register(callback, agent_type):
"""Subscribe callback to init event for the specified agent.
:param agent_type: an agent type as defined in neutron_lib.constants.
:param callback: a callback that can process the agent init event.
"""
registry.subscribe(callback, agent_type, events.AFTER_INIT)
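# Usage sketch (hypothetical, not part of this module): wiring a plugin hook
# to an agent's AFTER_INIT event and firing it from the agent itself.
#
#   from neutron_lib import constants
#
#   def _on_agent_init(resource, event, trigger, payload=None):
#       pass  # e.g. configure extensions once the agent is up
#
#   register(_on_agent_init, constants.AGENT_TYPE_OVS)
#   ...
#   notify_init_event(constants.AGENT_TYPE_OVS, agent)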
| noironetworks/neutron | neutron/plugins/ml2/drivers/agent/capabilities.py | Python | apache-2.0 | 1,161 | 0 |
import sys
# It may be that the interpreter (whether python or pypy-c) was not linked
# with C++; force its loading before doing anything else (note that not
# linking with C++ spells trouble anyway for any C++ libraries ...)
if 'linux' in sys.platform and 'GCC' in sys.version:
# TODO: check executable to see whether linking indeed didn't happen
import ctypes
try:
stdcpp = ctypes.CDLL('libstdc++.so', ctypes.RTLD_GLOBAL)
except Exception:
pass
# TODO: what if Linux/clang and what if Mac?
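# A possible probe for the first TODO (sketch, untested assumption): inspect
# the executable's dynamic dependencies before forcing the load, e.g.
#
#   import subprocess
#   deps = subprocess.check_output(['ldd', sys.executable])
#   already_linked = b'libstdc++' in deps
#
# 'ldd' is Linux-specific, which is part of why this remains a TODO.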
| root-mirror/root | bindings/pyroot/cppyy/cppyy/python/cppyy/_stdcpp_fix.py | Python | lgpl-2.1 | 524 | 0 |
from flask import Flask
app = Flask(__name__)
from media import Movie
from flask import render_template
import re
@app.route('/')
def index():
'''View function for index page.'''
toy_story = Movie(title = "Toy Story 3", trailer_youtube_url ="https://www.youtube.com/watch?v=QW0sjQFpXTU",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMTgxOTY4Mjc0MF5BMl5BanBnXkFtZTcwNTA4MDQyMw@@._V1_UY268_CR3,0,182,268_AL_.jpg",
storyline='''Andy's toys get mistakenly delivered to a day care centre.
Woody convinces the other toys that they weren't dumped and leads them on an expedition back
home.''')
pulp_fiction = Movie(title = "Pulp Fiction ", trailer_youtube_url ="https://www.youtube.com/watch?v=s7EdQ4FqbhY",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMTkxMTA5OTAzMl5BMl5BanBnXkFtZTgwNjA5MDc3NjE@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''The lives of two mob hit men, a boxer, a gangster's wife, and a pair of diner bandits
intertwine in four tales of violence and redemption''')
shawshank = Movie(title = "The Shawshank Redemption", trailer_youtube_url ="https://www.youtube.com/watch?v=KtwXlIwozog",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BODU4MjU4NjIwNl5BMl5BanBnXkFtZTgwMDU2MjEyMDE@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''Two imprisoned men bond over a number of years, finding solace
and eventual redemption through acts of common decency.''')
godfather = Movie(title = "The Godfather ", trailer_youtube_url ="https://www.youtube.com/watch?v=sY1S34973zA",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMjEyMjcyNDI4MF5BMl5BanBnXkFtZTcwMDA5Mzg3OA@@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''The aging patriarch of an organized crime dynasty transfers control of his clandestine empire to his reluctant son.''')
dark_knight = Movie(title = "The Dark Knight ", trailer_youtube_url ="https://www.youtube.com/watch?v=EXeTwQWrcwY",
poster_image_url="https://images-na.ssl-images-amazon.com/images/M/MV5BMTMxNTMwODM0NF5BMl5BanBnXkFtZTcwODAyMTk2Mw@@._V1_UX182_CR0,0,182,268_AL_.jpg",
storyline='''Set within a year after the events of Batman Begins, Batman, Lieutenant James Gordon, and new district attorney Harvey Dent successfully begin to round up the criminals''')
movies = [toy_story, pulp_fiction, dark_knight, godfather, shawshank]
# Replace `Youtube URL` with just `Youtube video ID`
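# e.g. "https://www.youtube.com/watch?v=QW0sjQFpXTU" -> "QW0sjQFpXTU"
#      "https://youtu.be/QW0sjQFpXTU"                -> "QW0sjQFpXTU"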
for movie in movies:
youtube_id_match = re.search(r'(?<=v=)[^&#]+', movie.trailer_youtube_url)
youtube_id_match = youtube_id_match or re.search(r'(?<=be/)[^&#]+', movie.trailer_youtube_url)
trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match else None)
movie.trailer_youtube_url = trailer_youtube_id
return render_template('index.html',
data=movies)
if __name__ == '__main__':
app.run(debug=True)
| mr-karan/Udacity-FullStack-ND004 | Project1/projects/movieServer/app.py | Python | mit | 2,980 | 0.034564 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'rbhusPipeSubmitRenderMod.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_rbhusSubmit(object):
def setupUi(self, rbhusSubmit):
rbhusSubmit.setObjectName(_fromUtf8("rbhusSubmit"))
rbhusSubmit.resize(572, 796)
self.centralwidget = QtGui.QWidget(rbhusSubmit)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.gridLayout = QtGui.QGridLayout(self.centralwidget)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.labelFrange = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelFrange.sizePolicy().hasHeightForWidth())
self.labelFrange.setSizePolicy(sizePolicy)
self.labelFrange.setObjectName(_fromUtf8("labelFrange"))
self.gridLayout.addWidget(self.labelFrange, 7, 0, 1, 1)
self.comboFileType = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboFileType.sizePolicy().hasHeightForWidth())
self.comboFileType.setSizePolicy(sizePolicy)
self.comboFileType.setObjectName(_fromUtf8("comboFileType"))
self.gridLayout.addWidget(self.comboFileType, 2, 1, 1, 1)
self.pushSelectHostGroups = QtGui.QPushButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushSelectHostGroups.sizePolicy().hasHeightForWidth())
self.pushSelectHostGroups.setSizePolicy(sizePolicy)
self.pushSelectHostGroups.setObjectName(_fromUtf8("pushSelectHostGroups"))
self.gridLayout.addWidget(self.pushSelectHostGroups, 10, 2, 1, 1)
self.checkPngFlv = QtGui.QCheckBox(self.centralwidget)
self.checkPngFlv.setChecked(False)
self.checkPngFlv.setObjectName(_fromUtf8("checkPngFlv"))
self.gridLayout.addWidget(self.checkPngFlv, 24, 1, 1, 1)
self.lineEditAfterTask = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditAfterTask.sizePolicy().hasHeightForWidth())
self.lineEditAfterTask.setSizePolicy(sizePolicy)
self.lineEditAfterTask.setObjectName(_fromUtf8("lineEditAfterTask"))
self.gridLayout.addWidget(self.lineEditAfterTask, 16, 1, 1, 1)
self.labelFileName = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelFileName.sizePolicy().hasHeightForWidth())
self.labelFileName.setSizePolicy(sizePolicy)
self.labelFileName.setObjectName(_fromUtf8("labelFileName"))
self.gridLayout.addWidget(self.labelFileName, 1, 0, 1, 1)
self.labelPrio = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelPrio.sizePolicy().hasHeightForWidth())
self.labelPrio.setSizePolicy(sizePolicy)
self.labelPrio.setObjectName(_fromUtf8("labelPrio"))
self.gridLayout.addWidget(self.labelPrio, 18, 0, 1, 1)
self.labelImageName = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelImageName.sizePolicy().hasHeightForWidth())
self.labelImageName.setSizePolicy(sizePolicy)
self.labelImageName.setObjectName(_fromUtf8("labelImageName"))
self.gridLayout.addWidget(self.labelImageName, 5, 0, 1, 1)
self.comboRenderer = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboRenderer.sizePolicy().hasHeightForWidth())
self.comboRenderer.setSizePolicy(sizePolicy)
self.comboRenderer.setObjectName(_fromUtf8("comboRenderer"))
self.gridLayout.addWidget(self.comboRenderer, 12, 1, 1, 1)
self.labelAfterTime = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelAfterTime.sizePolicy().hasHeightForWidth())
self.labelAfterTime.setSizePolicy(sizePolicy)
self.labelAfterTime.setObjectName(_fromUtf8("labelAfterTime"))
self.gridLayout.addWidget(self.labelAfterTime, 17, 0, 1, 1)
self.pushSubmit = QtGui.QPushButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushSubmit.sizePolicy().hasHeightForWidth())
self.pushSubmit.setSizePolicy(sizePolicy)
self.pushSubmit.setObjectName(_fromUtf8("pushSubmit"))
self.gridLayout.addWidget(self.pushSubmit, 27, 2, 1, 1)
self.checkAfterTime = QtGui.QCheckBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkAfterTime.sizePolicy().hasHeightForWidth())
self.checkAfterTime.setSizePolicy(sizePolicy)
self.checkAfterTime.setObjectName(_fromUtf8("checkAfterTime"))
self.gridLayout.addWidget(self.checkAfterTime, 17, 2, 1, 1)
self.checkExrMov = QtGui.QCheckBox(self.centralwidget)
self.checkExrMov.setChecked(False)
self.checkExrMov.setObjectName(_fromUtf8("checkExrMov"))
self.gridLayout.addWidget(self.checkExrMov, 26, 1, 1, 1)
self.lineEditDescription = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditDescription.sizePolicy().hasHeightForWidth())
self.lineEditDescription.setSizePolicy(sizePolicy)
self.lineEditDescription.setDragEnabled(True)
self.lineEditDescription.setObjectName(_fromUtf8("lineEditDescription"))
self.gridLayout.addWidget(self.lineEditDescription, 23, 1, 1, 1)
self.labelBatching = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelBatching.sizePolicy().hasHeightForWidth())
self.labelBatching.setSizePolicy(sizePolicy)
self.labelBatching.setObjectName(_fromUtf8("labelBatching"))
self.gridLayout.addWidget(self.labelBatching, 20, 0, 1, 1)
self.labelUser = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelUser.sizePolicy().hasHeightForWidth())
self.labelUser.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setUnderline(False)
font.setWeight(75)
self.labelUser.setFont(font)
self.labelUser.setObjectName(_fromUtf8("labelUser"))
self.gridLayout.addWidget(self.labelUser, 0, 1, 1, 1)
self.afterTimeEdit = QtGui.QDateTimeEdit(self.centralwidget)
self.afterTimeEdit.setEnabled(False)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.afterTimeEdit.sizePolicy().hasHeightForWidth())
self.afterTimeEdit.setSizePolicy(sizePolicy)
self.afterTimeEdit.setTime(QtCore.QTime(14, 0, 0))
self.afterTimeEdit.setCurrentSection(QtGui.QDateTimeEdit.YearSection)
self.afterTimeEdit.setCalendarPopup(True)
self.afterTimeEdit.setObjectName(_fromUtf8("afterTimeEdit"))
self.gridLayout.addWidget(self.afterTimeEdit, 17, 1, 1, 1)
self.lineEditHostGroups = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditHostGroups.sizePolicy().hasHeightForWidth())
self.lineEditHostGroups.setSizePolicy(sizePolicy)
self.lineEditHostGroups.setReadOnly(True)
self.lineEditHostGroups.setObjectName(_fromUtf8("lineEditHostGroups"))
self.gridLayout.addWidget(self.lineEditHostGroups, 10, 1, 1, 1)
self.comboPrio = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboPrio.sizePolicy().hasHeightForWidth())
self.comboPrio.setSizePolicy(sizePolicy)
self.comboPrio.setObjectName(_fromUtf8("comboPrio"))
self.comboPrio.addItem(_fromUtf8(""))
self.comboPrio.addItem(_fromUtf8(""))
self.comboPrio.addItem(_fromUtf8(""))
self.gridLayout.addWidget(self.comboPrio, 18, 1, 1, 1)
self.lineEditOutName = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditOutName.sizePolicy().hasHeightForWidth())
self.lineEditOutName.setSizePolicy(sizePolicy)
self.lineEditOutName.setDragEnabled(True)
self.lineEditOutName.setReadOnly(False)
self.lineEditOutName.setObjectName(_fromUtf8("lineEditOutName"))
self.gridLayout.addWidget(self.lineEditOutName, 5, 1, 1, 1)
self.checkPngMP4 = QtGui.QCheckBox(self.centralwidget)
self.checkPngMP4.setChecked(False)
self.checkPngMP4.setObjectName(_fromUtf8("checkPngMP4"))
self.gridLayout.addWidget(self.checkPngMP4, 25, 1, 1, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.labelMinBatch = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelMinBatch.sizePolicy().hasHeightForWidth())
self.labelMinBatch.setSizePolicy(sizePolicy)
self.labelMinBatch.setObjectName(_fromUtf8("labelMinBatch"))
self.horizontalLayout_3.addWidget(self.labelMinBatch)
self.spinMinBatch = QtGui.QSpinBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinMinBatch.sizePolicy().hasHeightForWidth())
self.spinMinBatch.setSizePolicy(sizePolicy)
self.spinMinBatch.setMinimum(1)
self.spinMinBatch.setMaximum(999999999)
self.spinMinBatch.setObjectName(_fromUtf8("spinMinBatch"))
self.horizontalLayout_3.addWidget(self.spinMinBatch)
spacerItem = QtGui.QSpacerItem(30, 20, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem)
self.labelMaxBatch = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelMaxBatch.sizePolicy().hasHeightForWidth())
self.labelMaxBatch.setSizePolicy(sizePolicy)
self.labelMaxBatch.setObjectName(_fromUtf8("labelMaxBatch"))
self.horizontalLayout_3.addWidget(self.labelMaxBatch)
self.spinMaxBatch = QtGui.QSpinBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spinMaxBatch.sizePolicy().hasHeightForWidth())
self.spinMaxBatch.setSizePolicy(sizePolicy)
self.spinMaxBatch.setMinimum(1)
self.spinMaxBatch.setMaximum(999999999)
self.spinMaxBatch.setProperty("value", 2)
self.spinMaxBatch.setObjectName(_fromUtf8("spinMaxBatch"))
self.horizontalLayout_3.addWidget(self.spinMaxBatch)
self.gridLayout.addLayout(self.horizontalLayout_3, 20, 1, 1, 1)
self.labelDescription = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelDescription.sizePolicy().hasHeightForWidth())
self.labelDescription.setSizePolicy(sizePolicy)
self.labelDescription.setObjectName(_fromUtf8("labelDescription"))
self.gridLayout.addWidget(self.labelDescription, 23, 0, 1, 1)
self.labelAfterTask = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelAfterTask.sizePolicy().hasHeightForWidth())
self.labelAfterTask.setSizePolicy(sizePolicy)
self.labelAfterTask.setObjectName(_fromUtf8("labelAfterTask"))
self.gridLayout.addWidget(self.labelAfterTask, 16, 0, 1, 1)
self.lineEditLayer = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditLayer.sizePolicy().hasHeightForWidth())
self.lineEditLayer.setSizePolicy(sizePolicy)
self.lineEditLayer.setToolTip(_fromUtf8(""))
self.lineEditLayer.setObjectName(_fromUtf8("lineEditLayer"))
self.gridLayout.addWidget(self.lineEditLayer, 14, 1, 1, 1)
self.labelLayer = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelLayer.sizePolicy().hasHeightForWidth())
self.labelLayer.setSizePolicy(sizePolicy)
self.labelLayer.setObjectName(_fromUtf8("labelLayer"))
self.gridLayout.addWidget(self.labelLayer, 14, 0, 1, 1)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.comboRes = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboRes.sizePolicy().hasHeightForWidth())
self.comboRes.setSizePolicy(sizePolicy)
self.comboRes.setObjectName(_fromUtf8("comboRes"))
self.horizontalLayout.addWidget(self.comboRes)
self.lineEditRes = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditRes.sizePolicy().hasHeightForWidth())
self.lineEditRes.setSizePolicy(sizePolicy)
self.lineEditRes.setToolTip(_fromUtf8(""))
self.lineEditRes.setObjectName(_fromUtf8("lineEditRes"))
self.horizontalLayout.addWidget(self.lineEditRes)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.gridLayout.addLayout(self.horizontalLayout, 9, 1, 1, 1)
self.labelImageType = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelImageType.sizePolicy().hasHeightForWidth())
self.labelImageType.setSizePolicy(sizePolicy)
self.labelImageType.setObjectName(_fromUtf8("labelImageType"))
self.gridLayout.addWidget(self.labelImageType, 4, 0, 1, 1)
self.lineEditFrange = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditFrange.sizePolicy().hasHeightForWidth())
self.lineEditFrange.setSizePolicy(sizePolicy)
self.lineEditFrange.setToolTip(_fromUtf8(""))
self.lineEditFrange.setObjectName(_fromUtf8("lineEditFrange"))
self.gridLayout.addWidget(self.lineEditFrange, 7, 1, 1, 1)
self.labelOsType = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelOsType.sizePolicy().hasHeightForWidth())
self.labelOsType.setSizePolicy(sizePolicy)
self.labelOsType.setObjectName(_fromUtf8("labelOsType"))
self.gridLayout.addWidget(self.labelOsType, 15, 0, 1, 1)
self.checkSloppy = QtGui.QCheckBox(self.centralwidget)
self.checkSloppy.setObjectName(_fromUtf8("checkSloppy"))
self.gridLayout.addWidget(self.checkSloppy, 16, 2, 1, 1)
self.labelCamera = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelCamera.sizePolicy().hasHeightForWidth())
self.labelCamera.setSizePolicy(sizePolicy)
self.labelCamera.setObjectName(_fromUtf8("labelCamera"))
self.gridLayout.addWidget(self.labelCamera, 8, 0, 1, 1)
self.labelResolution = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelResolution.sizePolicy().hasHeightForWidth())
self.labelResolution.setSizePolicy(sizePolicy)
self.labelResolution.setObjectName(_fromUtf8("labelResolution"))
self.gridLayout.addWidget(self.labelResolution, 9, 0, 1, 1)
self.label = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.comboImageType = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboImageType.sizePolicy().hasHeightForWidth())
self.comboImageType.setSizePolicy(sizePolicy)
self.comboImageType.setObjectName(_fromUtf8("comboImageType"))
self.gridLayout.addWidget(self.comboImageType, 4, 1, 1, 1)
self.lineEditFileName = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditFileName.sizePolicy().hasHeightForWidth())
self.lineEditFileName.setSizePolicy(sizePolicy)
self.lineEditFileName.setToolTip(_fromUtf8(""))
self.lineEditFileName.setDragEnabled(True)
self.lineEditFileName.setObjectName(_fromUtf8("lineEditFileName"))
self.gridLayout.addWidget(self.lineEditFileName, 1, 1, 1, 1)
self.checkBatching = QtGui.QCheckBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkBatching.sizePolicy().hasHeightForWidth())
self.checkBatching.setSizePolicy(sizePolicy)
self.checkBatching.setChecked(False)
self.checkBatching.setObjectName(_fromUtf8("checkBatching"))
self.gridLayout.addWidget(self.checkBatching, 20, 2, 1, 1)
self.labeFileType = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labeFileType.sizePolicy().hasHeightForWidth())
self.labeFileType.setSizePolicy(sizePolicy)
self.labeFileType.setObjectName(_fromUtf8("labeFileType"))
self.gridLayout.addWidget(self.labeFileType, 2, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem2)
self.checkHold = QtGui.QCheckBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.checkHold.sizePolicy().hasHeightForWidth())
self.checkHold.setSizePolicy(sizePolicy)
self.checkHold.setLayoutDirection(QtCore.Qt.RightToLeft)
self.checkHold.setObjectName(_fromUtf8("checkHold"))
self.horizontalLayout_2.addWidget(self.checkHold)
self.gridLayout.addLayout(self.horizontalLayout_2, 27, 1, 1, 1)
self.labeRenderer = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labeRenderer.sizePolicy().hasHeightForWidth())
self.labeRenderer.setSizePolicy(sizePolicy)
self.labeRenderer.setObjectName(_fromUtf8("labeRenderer"))
self.gridLayout.addWidget(self.labeRenderer, 12, 0, 1, 1)
self.labelHostGroup = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.labelHostGroup.sizePolicy().hasHeightForWidth())
self.labelHostGroup.setSizePolicy(sizePolicy)
self.labelHostGroup.setObjectName(_fromUtf8("labelHostGroup"))
self.gridLayout.addWidget(self.labelHostGroup, 10, 0, 1, 1)
self.comboOsType = QtGui.QComboBox(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.comboOsType.sizePolicy().hasHeightForWidth())
self.comboOsType.setSizePolicy(sizePolicy)
self.comboOsType.setObjectName(_fromUtf8("comboOsType"))
self.gridLayout.addWidget(self.comboOsType, 15, 1, 1, 1)
self.lineEditCameras = QtGui.QLineEdit(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditCameras.sizePolicy().hasHeightForWidth())
self.lineEditCameras.setSizePolicy(sizePolicy)
self.lineEditCameras.setObjectName(_fromUtf8("lineEditCameras"))
self.gridLayout.addWidget(self.lineEditCameras, 8, 1, 1, 1)
rbhusSubmit.setCentralWidget(self.centralwidget)
self.retranslateUi(rbhusSubmit)
QtCore.QMetaObject.connectSlotsByName(rbhusSubmit)
def retranslateUi(self, rbhusSubmit):
rbhusSubmit.setWindowTitle(_translate("rbhusSubmit", "rbhusSubmit", None))
self.labelFrange.setText(_translate("rbhusSubmit", "fRange ", None))
self.pushSelectHostGroups.setText(_translate("rbhusSubmit", "select", None))
self.checkPngFlv.setText(_translate("rbhusSubmit", "convert png to FLV", None))
self.lineEditAfterTask.setText(_translate("rbhusSubmit", "0", None))
self.labelFileName.setText(_translate("rbhusSubmit", "fileName", None))
self.labelPrio.setText(_translate("rbhusSubmit", "priority", None))
self.labelImageName.setText(_translate("rbhusSubmit", "outName", None))
self.labelAfterTime.setText(_translate("rbhusSubmit", "afterTime ", None))
self.pushSubmit.setText(_translate("rbhusSubmit", "submit", None))
self.checkAfterTime.setText(_translate("rbhusSubmit", "enable", None))
self.checkExrMov.setText(_translate("rbhusSubmit", "convert exr to MOV", None))
self.lineEditDescription.setText(_translate("rbhusSubmit", "default", None))
self.labelBatching.setText(_translate("rbhusSubmit", "batching", None))
self.labelUser.setText(_translate("rbhusSubmit", "TextLabel", None))
self.afterTimeEdit.setDisplayFormat(_translate("rbhusSubmit", "yyyy-M-d h:mm A", None))
self.lineEditHostGroups.setToolTip(_translate("rbhusSubmit", "comma separated list of host groups to render on", None))
self.lineEditHostGroups.setText(_translate("rbhusSubmit", "default", None))
self.comboPrio.setItemText(0, _translate("rbhusSubmit", "normal", None))
self.comboPrio.setItemText(1, _translate("rbhusSubmit", "low", None))
self.comboPrio.setItemText(2, _translate("rbhusSubmit", "high", None))
self.lineEditOutName.setToolTip(_translate("rbhusSubmit", "name of the image file. eg: wtf.png", None))
self.lineEditOutName.setWhatsThis(_translate("rbhusSubmit", "<html><head/><body><p>name of the image file. eg: <span style=\" font-weight:600;\">wtfigo.png</span></p></body></html>", None))
self.lineEditOutName.setText(_translate("rbhusSubmit", "default", None))
self.checkPngMP4.setText(_translate("rbhusSubmit", "convert png to MP4", None))
self.labelMinBatch.setText(_translate("rbhusSubmit", "min", None))
self.labelMaxBatch.setText(_translate("rbhusSubmit", "max", None))
self.labelDescription.setText(_translate("rbhusSubmit", "description", None))
self.labelAfterTask.setText(_translate("rbhusSubmit", "afterTasks", None))
self.lineEditLayer.setText(_translate("rbhusSubmit", "default", None))
self.labelLayer.setText(_translate("rbhusSubmit", "layers", None))
self.lineEditRes.setWhatsThis(_translate("rbhusSubmit", "<html><head/><body><p>frame range in the format</p><p>startframe-endframe:byframes</p><p>eg:</p><p>render frames from 1 to 100 : <span style=\" font-weight:600;\">1-100</span></p><p>render every 5th frame from 1 to 100 : <span style=\" font-weight:600;\">1-100:5</span></p><p>render 1 frame :<span style=\" font-weight:600;\"> 1</span></p><p><br/></p><p><br/></p></body></html>", None))
self.lineEditRes.setText(_translate("rbhusSubmit", "default", None))
self.labelImageType.setText(_translate("rbhusSubmit", "imageType", None))
self.lineEditFrange.setWhatsThis(_translate("rbhusSubmit", "<html><head/><body><p>frame range in the format</p><p>startframe-endframe:byframes</p><p>eg:</p><p>render frames from 1 to 100 : <span style=\" font-weight:600;\">1-100</span></p><p>render every 5th frame from 1 to 100 : <span style=\" font-weight:600;\">1-100:5</span></p><p>render 1 frame :<span style=\" font-weight:600;\"> 1</span></p><p><br/></p><p><br/></p></body></html>", None))
self.lineEditFrange.setText(_translate("rbhusSubmit", "1", None))
self.labelOsType.setText(_translate("rbhusSubmit", "osType", None))
self.checkSloppy.setText(_translate("rbhusSubmit", "sloppy", None))
self.labelCamera.setText(_translate("rbhusSubmit", "cameras", None))
self.labelResolution.setText(_translate("rbhusSubmit", "resolution", None))
self.label.setText(_translate("rbhusSubmit", "USER", None))
self.lineEditFileName.setWhatsThis(_translate("rbhusSubmit", "comma separated list of files to render", None))
self.checkBatching.setText(_translate("rbhusSubmit", "enable", None))
self.labeFileType.setText(_translate("rbhusSubmit", "fileType", None))
self.checkHold.setText(_translate("rbhusSubmit", "deactivate", None))
self.labeRenderer.setText(_translate("rbhusSubmit", "renderer", None))
self.labelHostGroup.setText(_translate("rbhusSubmit", "hostGroup", None))
self.lineEditCameras.setToolTip(_translate("rbhusSubmit", "comma separated list of cameras to render", None))
self.lineEditCameras.setText(_translate("rbhusSubmit", "default", None))
| shrinidhi666/rbhus | rbhusUI/lib/rbhusPipeSubmitRenderMod.py | Python | gpl-3.0 | 30,076 | 0.003757 |
#std
import logging
#3rd
from gevent import Greenlet,sleep
#shaveet
from shaveet.config import MAX_CLIENTS_GC,CLIENT_GC_INTERVAL
from shaveet.lookup import all_clients,discard_client
logger = logging.getLogger("shaveet.gc")
class ClientGC(Greenlet):
"""
this greenthread collects the clients that are no longer active
"""
def run(self):
while True:
logger.info("ClientGC:processing clients")
client_processed = 0
for client_id,client in all_clients().iteritems():
if not client.is_active():
logger.debug("ClientGC:collecting id:%s,ts:%d,waiting:%s",client.id,client.ts,client.is_waiting)
discard_client(client)
# process in chunks of MAX_CLIENTS_GC; sleep(0) yields to the next greenlet
client_processed+=1
if client_processed % MAX_CLIENTS_GC == 0:
sleep(0)
logger.info("ClientGC:sleeping")
sleep(CLIENT_GC_INTERVAL)
| urielka/shaveet | shaveet/gc.py | Python | mit | 925 | 0.027027 |
# Copyright 2015 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup
from setuptools import distutils
import os
import sys
def get_version_from_pkg_info():
metadata = distutils.dist.DistributionMetadata("PKG-INFO")
return metadata.version
def get_version_from_pyver():
try:
import pyver
except ImportError:
if 'sdist' in sys.argv or 'bdist_wheel' in sys.argv:
raise ImportError('You must install pyver to create a package')
else:
return 'noversion'
version, version_info = pyver.get_version(pkg="datalake_api",
public=True)
return version
def get_version():
if os.path.exists("PKG-INFO"):
return get_version_from_pkg_info()
else:
return get_version_from_pyver()
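# Resolution order: an sdist/wheel build ships PKG-INFO, so installed copies
# read the recorded version; a source checkout derives it from pyver instead.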
setup(name='datalake_api',
url='https://github.com/planetlabs/datalake-api',
version=get_version(),
description='datalake_api ingests datalake metadata records',
author='Brian Cavagnolo',
author_email='[email protected]',
packages=['datalake_api'],
install_requires=[
'pyver>=1.0.18',
'memoized_property>=1.0.2',
'simplejson>=3.3.1',
'datalake-common>=0.25',
'Flask>=0.10.1',
'flask-swagger==0.2.8',
'boto3==1.1.3',
'raven[flask]>=5.6.0',
'blinker>=1.4',
],
extras_require={
'test': [
'pytest==2.7.2',
'flake8==2.5.0',
'moto==0.4.23',
],
},
include_package_data=True)
| planetlabs/datalake-api | setup.py | Python | apache-2.0 | 2,140 | 0 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add fetch values predicate
Revision ID: 732f1c06bcbf
Revises: d6db5a5cdb5d
Create Date: 2017-03-03 09:15:56.800930
"""
# revision identifiers, used by Alembic.
revision = "732f1c06bcbf"
down_revision = "d6db5a5cdb5d"
import sqlalchemy as sa
from alembic import op
def upgrade():
op.add_column(
"datasources",
sa.Column("fetch_values_from", sa.String(length=100), nullable=True),
)
op.add_column(
"tables",
sa.Column("fetch_values_predicate", sa.String(length=1000), nullable=True),
)
def downgrade():
op.drop_column("tables", "fetch_values_predicate")
op.drop_column("datasources", "fetch_values_from")
| airbnb/superset | superset/migrations/versions/732f1c06bcbf_add_fetch_values_predicate.py | Python | apache-2.0 | 1,455 | 0.002062 |
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
import frappe
from frappe import _
from frappe.model.document import Document
from frappe.utils import flt, nowdate
import erpnext
from erpnext.accounts.doctype.journal_entry.journal_entry import get_default_bank_cash_account
from erpnext.hr.utils import validate_active_employee
class EmployeeAdvanceOverPayment(frappe.ValidationError):
pass
class EmployeeAdvance(Document):
def onload(self):
self.get("__onload").make_payment_via_journal_entry = frappe.db.get_single_value('Accounts Settings',
'make_payment_via_journal_entry')
def validate(self):
validate_active_employee(self.employee)
self.set_status()
def on_cancel(self):
self.ignore_linked_doctypes = ('GL Entry',)  # note the trailing comma: a tuple, not a plain string
def set_status(self):
if self.docstatus == 0:
self.status = "Draft"
if self.docstatus == 1:
if self.claimed_amount and flt(self.claimed_amount) == flt(self.paid_amount):
self.status = "Claimed"
elif self.paid_amount and self.advance_amount == flt(self.paid_amount):
self.status = "Paid"
else:
self.status = "Unpaid"
elif self.docstatus == 2:
self.status = "Cancelled"
def set_total_advance_paid(self):
paid_amount = frappe.db.sql("""
select ifnull(sum(debit), 0) as paid_amount
from `tabGL Entry`
where against_voucher_type = 'Employee Advance'
and against_voucher = %s
and party_type = 'Employee'
and party = %s
""", (self.name, self.employee), as_dict=1)[0].paid_amount
return_amount = frappe.db.sql("""
select ifnull(sum(credit), 0) as return_amount
from `tabGL Entry`
where against_voucher_type = 'Employee Advance'
and voucher_type != 'Expense Claim'
and against_voucher = %s
and party_type = 'Employee'
and party = %s
""", (self.name, self.employee), as_dict=1)[0].return_amount
if paid_amount != 0:
paid_amount = flt(paid_amount) / flt(self.exchange_rate)
if return_amount != 0:
return_amount = flt(return_amount) / flt(self.exchange_rate)
if flt(paid_amount) > self.advance_amount:
frappe.throw(_("Row {0}# Paid Amount cannot be greater than requested advance amount"),
EmployeeAdvanceOverPayment)
if flt(return_amount) > self.paid_amount - self.claimed_amount:
frappe.throw(_("Return amount cannot be greater unclaimed amount"))
self.db_set("paid_amount", paid_amount)
self.db_set("return_amount", return_amount)
self.set_status()
frappe.db.set_value("Employee Advance", self.name , "status", self.status)
def update_claimed_amount(self):
claimed_amount = frappe.db.sql("""
SELECT sum(ifnull(allocated_amount, 0))
FROM `tabExpense Claim Advance` eca, `tabExpense Claim` ec
WHERE
eca.employee_advance = %s
AND ec.approval_status="Approved"
AND ec.name = eca.parent
AND ec.docstatus=1
AND eca.allocated_amount > 0
""", self.name)[0][0] or 0
frappe.db.set_value("Employee Advance", self.name, "claimed_amount", flt(claimed_amount))
self.reload()
self.set_status()
frappe.db.set_value("Employee Advance", self.name, "status", self.status)
@frappe.whitelist()
def get_pending_amount(employee, posting_date):
employee_due_amount = frappe.get_all("Employee Advance", \
filters = {"employee":employee, "docstatus":1, "posting_date":("<=", posting_date)}, \
fields = ["advance_amount", "paid_amount"])
return sum([(emp.advance_amount - emp.paid_amount) for emp in employee_due_amount])
@frappe.whitelist()
def make_bank_entry(dt, dn):
doc = frappe.get_doc(dt, dn)
payment_account = get_default_bank_cash_account(doc.company, account_type="Cash",
mode_of_payment=doc.mode_of_payment)
if not payment_account:
frappe.throw(_("Please set a Default Cash Account in Company defaults"))
advance_account_currency = frappe.db.get_value('Account', doc.advance_account, 'account_currency')
advance_amount, advance_exchange_rate = get_advance_amount_advance_exchange_rate(advance_account_currency,doc )
paying_amount, paying_exchange_rate = get_paying_amount_paying_exchange_rate(payment_account, doc)
je = frappe.new_doc("Journal Entry")
je.posting_date = nowdate()
je.voucher_type = 'Bank Entry'
je.company = doc.company
je.remark = 'Payment against Employee Advance: ' + dn + '\n' + doc.purpose
je.multi_currency = 1 if advance_account_currency != payment_account.account_currency else 0
je.append("accounts", {
"account": doc.advance_account,
"account_currency": advance_account_currency,
"exchange_rate": flt(advance_exchange_rate),
"debit_in_account_currency": flt(advance_amount),
"reference_type": "Employee Advance",
"reference_name": doc.name,
"party_type": "Employee",
"cost_center": erpnext.get_default_cost_center(doc.company),
"party": doc.employee,
"is_advance": "Yes"
})
je.append("accounts", {
"account": payment_account.account,
"cost_center": erpnext.get_default_cost_center(doc.company),
"credit_in_account_currency": flt(paying_amount),
"account_currency": payment_account.account_currency,
"account_type": payment_account.account_type,
"exchange_rate": flt(paying_exchange_rate)
})
return je.as_dict()
def get_advance_amount_advance_exchange_rate(advance_account_currency, doc):
if advance_account_currency != doc.currency:
advance_amount = flt(doc.advance_amount) * flt(doc.exchange_rate)
advance_exchange_rate = 1
else:
advance_amount = doc.advance_amount
advance_exchange_rate = doc.exchange_rate
return advance_amount, advance_exchange_rate
def get_paying_amount_paying_exchange_rate(payment_account, doc):
if payment_account.account_currency != doc.currency:
paying_amount = flt(doc.advance_amount) * flt(doc.exchange_rate)
paying_exchange_rate = 1
else:
paying_amount = doc.advance_amount
paying_exchange_rate = doc.exchange_rate
return paying_amount, paying_exchange_rate
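# Both helpers above normalise the (amount, exchange rate) pair for the
# Journal Entry line: when the account currency differs from the advance
# currency, the amount is pre-converted with the document's exchange rate and
# the line's rate collapses to 1; otherwise the amount is passed through with
# the document's exchange rate unchanged.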
@frappe.whitelist()
def create_return_through_additional_salary(doc):
import json
if isinstance(doc, str):
doc = frappe._dict(json.loads(doc))
additional_salary = frappe.new_doc('Additional Salary')
additional_salary.employee = doc.employee
additional_salary.currency = doc.currency
additional_salary.amount = doc.paid_amount - doc.claimed_amount
additional_salary.company = doc.company
additional_salary.ref_doctype = doc.doctype
additional_salary.ref_docname = doc.name
return additional_salary
@frappe.whitelist()
def make_return_entry(employee, company, employee_advance_name, return_amount, advance_account, currency, exchange_rate, mode_of_payment=None):
bank_cash_account = get_default_bank_cash_account(company, account_type='Cash', mode_of_payment = mode_of_payment)
if not bank_cash_account:
frappe.throw(_("Please set a Default Cash Account in Company defaults"))
advance_account_currency = frappe.db.get_value('Account', advance_account, 'account_currency')
je = frappe.new_doc('Journal Entry')
je.posting_date = nowdate()
je.voucher_type = get_voucher_type(mode_of_payment)
je.company = company
je.remark = 'Return against Employee Advance: ' + employee_advance_name
je.multi_currency = 1 if advance_account_currency != bank_cash_account.account_currency else 0
advance_account_amount = flt(return_amount) if advance_account_currency==currency \
else flt(return_amount) * flt(exchange_rate)
je.append('accounts', {
'account': advance_account,
'credit_in_account_currency': advance_account_amount,
'account_currency': advance_account_currency,
'exchange_rate': flt(exchange_rate) if advance_account_currency == currency else 1,
'reference_type': 'Employee Advance',
'reference_name': employee_advance_name,
'party_type': 'Employee',
'party': employee,
'is_advance': 'Yes'
})
bank_amount = flt(return_amount) if bank_cash_account.account_currency==currency \
else flt(return_amount) * flt(exchange_rate)
je.append("accounts", {
"account": bank_cash_account.account,
"debit_in_account_currency": bank_amount,
"account_currency": bank_cash_account.account_currency,
"account_type": bank_cash_account.account_type,
"exchange_rate": flt(exchange_rate) if bank_cash_account.account_currency == currency else 1
})
return je.as_dict()
def get_voucher_type(mode_of_payment=None):
voucher_type = "Cash Entry"
if mode_of_payment:
mode_of_payment_type = frappe.get_cached_value('Mode of Payment', mode_of_payment, 'type')
if mode_of_payment_type == "Bank":
voucher_type = "Bank Entry"
return voucher_type
| mhbu50/erpnext | erpnext/hr/doctype/employee_advance/employee_advance.py | Python | gpl-3.0 | 8,463 | 0.025996 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib,urllib2,re,xbmc,xbmcplugin,xbmcgui,sys,xbmcaddon  # xbmc is needed for xbmc.Keyboard in listVideos
pluginhandle = int(sys.argv[1])
settings = xbmcaddon.Addon(id='plugin.video.dtm_tv')
translation = settings.getLocalizedString
language=""
language=settings.getSetting("language")
if language=="":
settings.openSettings()
language=settings.getSetting("language")
if language=="0":
language="DE"
elif language=="1":
language="EN"
def cleanTitle(title):
return title.replace("\\u00c4","Ä").replace("\\u00e4","ä").replace("\\u00d6","Ö").replace("\\u00f6","ö").replace("\\u00dc","Ü").replace("\\u00fc","ü").replace("\\u00df","ß").strip()
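# e.g. cleanTitle("N\\u00fcrburgring ") -> "Nürburgring"
# (the DTM API returns JSON-escaped German umlauts)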
def index():
addDir(translation(30001),"LATEST","listVideos","")
addDir(translation(30002),"MOST_VIEWED","listVideos","")
addDir(translation(30003),"BEST_RATED","listVideos","")
addDir(translation(30004),"SEARCH","listVideos","")
xbmcplugin.endOfDirectory(pluginhandle)
def listVideos(url):
values = {}
if url=="LATEST":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'date'}
elif url=="MOST_VIEWED":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'views'}
elif url=="BEST_RATED":
values = {'string':'*',
'lang':language,
'page':'1',
'order':'ranking'}
elif url=="SEARCH":
keyboard = xbmc.Keyboard('', translation(30004))
keyboard.doModal()
if keyboard.isConfirmed() and keyboard.getText():
search_string = keyboard.getText()
values = {'string':search_string,
'lang':language,
'page':'1',
'order':'date'}
if len(values)>0:
data = urllib.urlencode(values)
listVideosMain(data)
def listVideosMain(url):
content = getUrl("http://www.dtm.tv/Daten/getSearchData",data=url)
spl=content.split('{"id":')
for i in range(1,len(spl),1):
entry=spl[i]
match=re.compile('"bild":"(.+?)"', re.DOTALL).findall(entry)
thumb="http://www.dtm.tv/media/images/"+match[0]
match=re.compile('"publishdate":"(.+?)"', re.DOTALL).findall(entry)
date=match[0]
match=re.compile('"title":"(.+?)"', re.DOTALL).findall(entry)
title=date+" - "+cleanTitle(match[0])
urls=[]
match=re.compile('"url1":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
match=re.compile('"url2":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
match=re.compile('"url3":"(.+?)"', re.DOTALL).findall(entry)
if len(match)==1:
urls.append(match[0].replace("\\",""))
urlNew=""
for urlTemp in urls:
if urlTemp.find("_HD.mp4")>=0:
urlNew=urlTemp
elif urlTemp.find("_SD.mp4")>=0:
if urlNew=="":
urlNew=urlTemp
elif urlTemp.find(".flv")>=0:
if urlNew=="":
urlNew=urlTemp
addLink(title,urlNew,'playVideo',thumb)
match=re.compile('"nextPage":(.+?),', re.DOTALL).findall(content)
if len(match)==1:
        dataNext=url[:url.find("page=")+5]+match[0]
        temp=url[url.find("page=")+5:]
        if temp.find("&")>=0:
            dataNext=dataNext+temp[temp.find("&"):]
addDir("Next Page ("+str(match[0])+")",dataNext,"listVideosMain","")
xbmcplugin.endOfDirectory(pluginhandle)
def playVideo(url):
listitem = xbmcgui.ListItem(path=url)
return xbmcplugin.setResolvedUrl(pluginhandle, True, listitem)
def getUrl(url,data=None,cookie=None):
if data!=None:
req = urllib2.Request(url,data)
req.add_header('Content-Type', 'application/x-www-form-urlencoded')
else:
req = urllib2.Request(url)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:11.0) Gecko/20100101 Firefox/11.0')
if cookie!=None:
req.add_header('Cookie',cookie)
response = urllib2.urlopen(req,timeout=30)
link=response.read()
response.close()
return link
def parameters_string_to_dict(parameters):
''' Convert parameters encoded in a URL to a dict. '''
paramDict = {}
if parameters:
paramPairs = parameters[1:].split("&")
for paramsPair in paramPairs:
paramSplits = paramsPair.split('=')
if (len(paramSplits)) == 2:
paramDict[paramSplits[0]] = paramSplits[1]
return paramDict
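# e.g. sys.argv[2] == "?url=XYZ&mode=playVideo" yields {'url': 'XYZ', 'mode': 'playVideo'}.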
def addLink(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultVideo.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
liz.setProperty('IsPlayable', 'true')
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz)
return ok
def addDir(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name } )
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
params=parameters_string_to_dict(sys.argv[2])
mode=params.get('mode')
url=params.get('url')
if type(url)==type(str()):
url=urllib.unquote_plus(url)
if mode == 'listVideos':
listVideos(url)
elif mode == 'playVideo':
playVideo(url)
elif mode == 'listVideosMain':
listVideosMain(url)
else:
index()
| AddonScriptorDE/plugin.video.dtm_tv | default.py | Python | gpl-2.0 | 6,186 | 0.032368 |
import discord
import re
import urllib.request
import xml.etree.ElementTree as ET
radio = {}
radioNames = {}
radioWhosPlaying = {}
radioNowPlaying = ''
playerStatus = 0
defaultChannel = ''
voice = ''
async def botWhatIsPlaying(client, message):
    if playerStatus == 0:
await client.send_message(message.channel, 'А ничего и не играет.')
else:
if radioNowPlaying in radioWhosPlaying:
            print("Getting who's playing for " + radioNowPlaying)
src = radioWhosPlaying[radioNowPlaying]
response = urllib.request.urlopen(src[0])
html = response.read()
codec = response.info().get_param('charset', 'utf8')
html = html.decode(codec)
p = re.compile(src[1])
result = re.search(p, html)
if result is not None:
gr = result.groups()
                if len(gr) == 3:
                    msg = "{:s} - {:s} ({:s})".format(gr[0], gr[1], gr[2])
                elif len(gr) == 2:
msg = "{:s} - {:s}".format(gr[0], gr[1])
else:
msg = 'Ляляля играет. Я хз'
await client.send_message(message.channel, msg)
else:
await client.send_message(message.channel, 'Не знаю что играет.')
else:
await client.send_message(message.channel,
'Информация не доступна для этой станции')
async def botJoinVoiceChannel(client, message):
print(message)
    if client.is_voice_connected(message.server):
        await client.send_message(message.channel,
                                  'Я уже в голосовом канале.')
        return
channel_name = defaultChannel
print('Trying to join: %s' % (channel_name))
check = lambda c: c.name == channel_name and c.type == discord.ChannelType.voice
channel = discord.utils.find(check, message.server.channels)
if channel is None:
await client.send_message(message.channel,
'Не могу найти канал с таким названием.')
else:
global voice
voice = await client.join_voice_channel(channel)
client.starter = message.author
async def botStop(client, message):
global playerStatus
client.player.stop()
client.loop.create_task(client.change_status())
playerStatus = 0
async def botPlayRadio(client, message):
global playerStatus
global radioNowPlaying
if not client.is_voice_connected(message.server):
await botJoinVoiceChannel(client, message)
    if playerStatus != 0:
print('Have to stop Radio first')
print('PlayerStatus: ' + str(playerStatus))
client.player.stop()
radioNowPlaying = ''
station = message.content[1:]
#Handle special short cuts (desired by discord members)
    if station == '1':
        print("Debug: piterFM")
        station = 'piterfm'
    elif station == '2':
        station = 'nashe'
    elif station == '3':
        print("Debug: chanson")
        station = 'chanson'
if station in radio:
radioUrl = radio[station]
print('Starting to play Radio Station: '+radioNames[station])
client.player = voice.create_ffmpeg_player(radioUrl)
client.player.start()
radioNowPlaying = station
playerStatus = 1
game = discord.Game(name=radioNames[station])
client.loop.create_task(client.change_status(game))
else:
print('No such station in list.')
commands = {
'!bot': botJoinVoiceChannel,
'!1': botPlayRadio,
'!2': botPlayRadio,
'!3': botPlayRadio,
'!0': botStop,
'!stop': botStop,
'!a': botWhatIsPlaying,
}
def load(config):
global radio
global radioNames
global radioWhosPlaying
global defaultChannel
# Open radio config and populate the command list, radio URL list and
# radio name list.
# configFile = open('cfg/radio.cfg').readlines()
# for line in configFile:
# tmp = line.split(', ')
# radio[tmp[0]] = tmp[1].rstrip('\n')
# radioNames[tmp[0]] = tmp[2].rstrip('\n')
# commands['!'+tmp[0]] = botPlayRadio
# radioWhosPlaying[tmp[0]] = [tmp[3], tmp[4].rstrip('\n')]
defaultChannel = config.getDefaultChannel()
data = open('cfg/radio.xml').read()
root = ET.fromstring(data)
for station in root:
cmd = station.find('command').text
name = station.get('name')
strURL = station.find('streamURL').text
nowURL = station.find('nowPlayingURL').text
nowRE = station.find('nowPlayingRE').text
radio[cmd] = strURL.strip(' \t\n')
radioNames[cmd] = name.strip('\n')
commands['!'+cmd] = botPlayRadio
# If we have now playing settings available
if(nowURL is not None and nowRE is not None):
radioWhosPlaying[cmd] = [nowURL.strip(' \n\t'), nowRE.strip(' \t\n')]
return commands
def getName():
return 'onlineRadio'
def getDescription():
    return 'Plays online radio stations found in cfg/radio.xml.'
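# A minimal cfg/radio.xml entry that load() above understands (station values are
# hypothetical; the root element name is not checked):
#
# <stations>
#     <station name="Nashe Radio">
#         <command>nashe</command>
#         <streamURL>http://example.com/nashe.mp3</streamURL>
#         <nowPlayingURL>http://example.com/nashe/now</nowPlayingURL>
#         <nowPlayingRE>(.+?) - (.+?)</nowPlayingRE>
#     </station>
# </stations>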
| mephasor/mephaBot | addons/onlineRadio.py | Python | gpl-3.0 | 5,202 | 0.002169 |
# coding: utf-8
"""参数验证相关工具
"""
import re
import ujson
import types
import numbers
from girlfriend.util.lang import args2fields
from girlfriend.exception import InvalidArgumentException
class Rule(object):
"""描述参数验证规则,并执行验证过程
"""
@args2fields()
def __init__(self, name,
type=None,
required=False, min=None, max=None,
regex=None, logic=None, default=None):
"""
:param name 参数名称,通常用于错误提示
:param required 如果为True,那么参数是必须的
:param min 如果是字符串,那么该参数为最小长度(等于此长度合法),
如果是数字(numbers.Number类型),那么为该参数最小值(等于此值算合法)
:param max 同上
:param regex 正则验证
:param type 类型验证,多个参数可以传递元组
:param logic 谓词函数,满足更加复杂的业务验证需要,比如查查数据库邮箱是否存在等等
该谓词函数并非返回True和False,如果有错误,那么返回错误消息的字符串,
如果没有错误,那么直接返回None
:param default 该项的默认值
"""
pass
@property
def name(self):
return self._name
@property
def default(self):
return self._default
@property
def required(self):
return self._required
def validate(self, value):
"""执行验证
:param value 要验证的值
"""
if self._required and self._is_empty(value):
raise InvalidArgumentException(
u"参数 '{}' 的值是必须的,不能为空".format(self._name))
        # if the value is optional and empty, the remaining checks can be skipped
if self._is_empty(value):
return
        # check the type
        self._validate_type(value)
        # check value / length bounds
        self._validate_min_max(value)
        # check the regular expression
        self._validate_regex(value)
        # check custom business logic
        self._validate_logic(value)
def _validate_type(self, value):
if not self._type:
return
if not isinstance(value, self._type):
raise InvalidArgumentException(
u"参数 '{name}' 的类型不正确,只允许以下类型:{types}".format(
name=self._name,
types=self._type
)
)
def _validate_min_max(self, value):
if self._min is not None:
if isinstance(value, numbers.Number):
if self._min > value:
raise InvalidArgumentException(
u"参数 '{name}' 的值不能小于{min}".format(
name=self._name, min=self._min)
)
else:
if self._min > len(value):
raise InvalidArgumentException(
u"参数 '{name}' 的长度不能小于{min}".format(
name=self._name, min=self._min)
)
if self._max is not None:
if isinstance(value, numbers.Number):
if self._max < value:
raise InvalidArgumentException(
u"参数 '{name}' 的值不能大于{max}".format(
name=self._name, max=self._max)
)
else:
if self._max < len(value):
raise InvalidArgumentException(
u"参数 '{name}' 的长度不能大于{max}".format(
name=self._name, max=self._max)
)
def _validate_regex(self, value):
if not self._regex:
return
value = str(value)
if not re.search(self._regex, value):
raise InvalidArgumentException(
u"参数 '{name}' 不符合正则表达式'{regex}'".format(
name=self._name, regex=self._regex)
)
def _validate_logic(self, value):
if self._logic is None:
return
msg = self._logic(value)
if msg:
raise InvalidArgumentException(msg)
def _is_empty(self, value):
"""判断一个值是否为空
如果值为None,那么返回True
如果值为空字符串,那么返回True
如果值为0, 那么不算空,返回False
"""
if value is None:
return True
if isinstance(value, types.StringType) and not value:
return True
return False
def be_json(name):
def _be_json(value):
try:
ujson.loads(value)
        except Exception:
return u"参数 '{}' 必须是json格式".format(name)
return _be_json
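# A minimal usage sketch of Rule (hypothetical rule values):
#
#   rule = Rule(u"age", type=numbers.Number, required=True, min=0, max=150)
#   rule.validate(30)    # passes silently
#   rule.validate(200)   # raises InvalidArgumentException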
| chihongze/girlfriend | girlfriend/util/validating.py | Python | mit | 4,999 | 0.000236 |
import logging
from sen.docker_backend import DockerContainer, RootImage
from sen.exceptions import NotifyError
from sen.tui.commands.base import Command
from sen.tui.views.disk_usage import DfBufferView
from sen.tui.views.help import HelpBufferView, HelpCommandView
from sen.tui.views.main import MainListBox
from sen.tui.views.image_info import ImageInfoWidget
from sen.tui.views.container_info import ContainerInfoView
from sen.tui.widgets.list.common import AsyncScrollableListBox, ScrollableListBox
from sen.tui.widgets.list.util import get_operation_notify_widget
from sen.tui.widgets.tree import ImageTree
logger = logging.getLogger(__name__)
class Buffer:
"""
base buffer class
"""
name = None # unique identifier
description = None # for help
display_name = None # display in status bar
widget = None # display this in main frame
# global keybinds which will be available in every buffer
global_keybinds = {
# navigation
"home": "navigate-top",
"gg": "navigate-top",
"end": "navigate-bottom",
"G": "navigate-bottom",
"down": "navigate-down",
"j": "navigate-down",
"up": "navigate-up",
"k": "navigate-up",
"ctrl d": "navigate-downwards",
"ctrl u": "navigate-upwards",
# UI
":": "prompt",
"/": "prompt prompt-text=\"\" initial-text=\"/\"",
"n": "search-next",
"N": "search-previous",
"f4": "prompt initial-text=\"filter \"",
"x": "kill-buffer",
"q": "kill-buffer quit-if-no-buffer",
"ctrl i": "select-next-buffer",
"ctrl o": "select-previous-buffer",
"h": "help",
"?": "help",
"f5": "layers",
}
# buffer specific keybinds
keybinds = {}
def __init__(self):
logger.debug("creating buffer %r", self)
self._keybinds = None # cache
self.refresh()
def __repr__(self):
return "{}(name={!r}, widget={!r})".format(
self.__class__.__name__, self.display_name, self.widget)
def destroy(self):
destroy_method = getattr(self.widget, "destroy", None)
if destroy_method:
destroy_method()
def find_previous(self, s=None):
logger.debug("searching next %r in %r", s, self.__class__.__name__)
try:
self.widget.find_previous(s)
except AttributeError as ex:
logger.debug(repr(ex))
raise NotifyError("Can't search in this buffer.")
def find_next(self, s=None):
logger.debug("searching next %r in %r", s, self.__class__.__name__)
try:
self.widget.find_next(s)
except AttributeError as ex:
logger.debug(repr(ex))
raise NotifyError("Can't search in this buffer.")
def build_status_bar(self):
status_bar = getattr(self.widget, "status_bar", None)
if status_bar:
return status_bar()
def filter(self, s):
logger.debug("filter widget %r with query %r", self.widget, s)
self.widget.filter(s)
def get_keybinds(self):
if self._keybinds is None:
self._keybinds = {}
self._keybinds.update(self.global_keybinds)
self._keybinds.update(self.keybinds)
return self._keybinds
def refresh(self):
refresh_func = getattr(self.widget, "refresh", None)
if refresh_func:
logger.info("refreshing widget %s", self.widget)
refresh_func()
def process_realtime_event(self, event):
logger.info("buffer %s doesn't process realtime events", self)
return
class ImageInfoBuffer(Buffer):
description = "Dashboard for information about selected image.\n" + \
"You can run command `df` to get more detailed info about disk usage."
keybinds = {
"enter": "display-info",
"d": "rm",
"i": "inspect",
"@": "refresh",
}
def __init__(self, docker_image, ui):
"""
:param docker_image:
:param ui: ui object so we refresh
"""
if isinstance(docker_image, RootImage):
raise NotifyError("Image \"scratch\" doesn't provide any more information.")
if docker_image.image_id == "<missing>":
raise NotifyError("This image (layer) is not available due to changes in docker-1.10 "
"image representation.")
self.docker_image = docker_image
self.display_name = docker_image.short_name
self.widget = ImageInfoWidget(ui, docker_image)
super().__init__()
def process_realtime_event(self, event):
if event.get("id", None) == self.docker_image.object_id:
self.widget.refresh()
class ContainerInfoBuffer(Buffer):
description = "Detailed info about selected container presented in a slick dashboard."
keybinds = {
"enter": "display-info",
"@": "refresh",
"i": "inspect",
}
def __init__(self, docker_container, ui):
"""
:param docker_container:
:param ui: ui object so we refresh
"""
self.docker_container = docker_container
self.display_name = docker_container.short_name
self.widget = ContainerInfoView(ui, docker_container)
super().__init__()
def process_realtime_event(self, event):
action = event.get("Action", None)
if action == "top":
return
if event.get("id", None) == self.docker_container.object_id:
self.widget.refresh()
class TreeBuffer(Buffer):
display_name = "Layers"
description = "Tree view of all layers available on your docker engine."
keybinds = {
"enter": "display-info",
}
def __init__(self, ui, docker_backend):
self.widget = ImageTree(ui, docker_backend)
super().__init__()
class MainListBuffer(Buffer):
display_name = "Listing"
description = "List of all known docker images and containers display in a single list"
keybinds = {
"d": "rm",
"D": "rm -f",
"s": "start",
"t": "stop",
"r": "restart",
"X": "kill",
"p": "pause",
"u": "unpause",
"enter": "display-info",
"b": "open-browser",
"l": "logs",
"f": "logs -f",
"i": "inspect",
"!": "toggle-live-updates", # TODO: rfe: move to global so this affects every buffer
"@": "refresh", # FIXME: move to global and refactor & rewrite
}
def __init__(self, ui, docker_backend):
self.ui = ui
self.widget = MainListBox(ui, docker_backend)
super().__init__()
def process_realtime_event(self, event):
self.widget.process_realtime_event(event)
class LogsBuffer(Buffer):
description = "Display logs of selected container."
display_name = "Logs "
def __init__(self, ui, docker_object, follow=False):
"""
:param docker_object: container to display logs
:param ui: ui object so we can refresh
"""
self.display_name += "({})".format(docker_object.short_name)
if isinstance(docker_object, DockerContainer):
try:
pre_message = "Getting logs for container {}...".format(docker_object.short_name)
ui.notify_message(pre_message)
if follow:
# FIXME: this is a bit race-y -- we might lose some logs with this approach
operation = docker_object.logs(follow=follow, lines=0)
static_data = docker_object.logs(follow=False).response
self.widget = AsyncScrollableListBox(operation.response, ui, static_data=static_data)
else:
operation = docker_object.logs(follow=follow)
self.widget = ScrollableListBox(ui, operation.response)
ui.remove_notification_message(pre_message)
ui.notify_widget(get_operation_notify_widget(operation, display_always=False))
except Exception as ex:
# FIXME: let's catch 404 and print that container doesn't exist
# instead of printing ugly HTTP error
raise NotifyError("Error getting logs for container %s: %r" % (docker_object, ex))
else:
raise NotifyError("Only containers have logs.")
super().__init__()
class InspectBuffer(Buffer):
display_name = "Inspect "
description = "Display all the information docker knows about selected object: " + \
"same output as `docker inspect`."
def __init__(self, ui, docker_object):
"""
:param docker_object: object to inspect
"""
self.docker_object = docker_object
self.ui = ui
self.widget = None
self.display_name += docker_object.short_name
super().__init__()
def refresh(self):
inspect_data = self.docker_object.display_inspect()
self.widget = ScrollableListBox(self.ui, inspect_data)
def process_realtime_event(self, event):
if event.get("id", None) == self.docker_object.object_id:
self.ui.notify_message("Docker object changed, refreshing.")
focus = self.widget.get_focus()[1]
self.widget.set_text(self.docker_object.display_inspect())
self.widget.set_focus(focus)
class HelpBuffer(Buffer):
# TODO: apply this interface to other buffers: create views
description = "Show information about currently displayed buffer and " + \
"what keybindings are available there"
display_name = "Help"
def __init__(self, ui, inp):
"""
display buffer with more info about object 'inp'
:param ui: UI instance
:param inp: Buffer, Command instance
"""
self.ui = ui
if isinstance(inp, Buffer):
self.display_name += "({})".format(inp.display_name)
self.widget = HelpBufferView(ui, inp, self.global_keybinds)
elif isinstance(inp, Command):
self.display_name += "({})".format(inp.name)
self.widget = HelpCommandView(ui, inp)
super().__init__()
class DfBuffer(Buffer):
description = "Show information about how much disk space container, images and volumes take."
display_name = "Disk Usage"
def __init__(self, ui):
"""
:param ui: UI instance
"""
self.ui = ui
self.widget = DfBufferView(ui, self)
super().__init__()
def refresh(self, df=None, containers=None, images=None):
self.widget.refresh(df=df, containers=containers, images=images)
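# A minimal sketch of adding a new buffer following the pattern above
# (VolumeListView is hypothetical; any widget offering refresh()/status_bar() works):
#
# class VolumeListBuffer(Buffer):
#     display_name = "Volumes"
#     description = "List of docker volumes."
#     keybinds = {"enter": "display-info"}
#
#     def __init__(self, ui, docker_backend):
#         self.widget = VolumeListView(ui, docker_backend)
#         super().__init__()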
| TomasTomecek/sen | sen/tui/buffer.py | Python | mit | 10,717 | 0.001306 |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import codecs
from datetime import datetime
import hashlib
import os
import time
try:
from unittest import mock
except ImportError:
import mock
from azure.core.exceptions import AzureError, HttpResponseError
from azure.core.pipeline.policies import SansIOHTTPPolicy
from azure.keyvault.keys import ApiVersion, JsonWebKey, KeyCurveName, KeyOperation, KeyVaultKey
from azure.keyvault.keys.crypto import CryptographyClient, EncryptionAlgorithm, KeyWrapAlgorithm, SignatureAlgorithm
from azure.keyvault.keys.crypto._key_validity import _UTC
from azure.keyvault.keys.crypto._providers import NoLocalCryptography, get_local_cryptography_provider
from azure.mgmt.keyvault.models import KeyPermissions, Permissions
import pytest
from _shared.json_attribute_matcher import json_attribute_matcher
from _shared.test_case import KeyVaultTestCase
from _test_case import client_setup, get_decorator, KeysTestCase
# without keys/get, a CryptographyClient created with a key ID performs all ops remotely
NO_GET = Permissions(keys=[p.value for p in KeyPermissions if p.value != "get"])
all_api_versions = get_decorator()
only_hsm = get_decorator(only_hsm=True)
no_get = get_decorator(permissions=NO_GET)
class CryptoClientTests(KeysTestCase, KeyVaultTestCase):
def __init__(self, *args, **kwargs):
kwargs["match_body"] = False
kwargs["custom_request_matchers"] = [json_attribute_matcher]
super(CryptoClientTests, self).__init__(*args, **kwargs)
plaintext = b"5063e6aaa845f150200547944fd199679c98ed6f99da0a0b2dafeaf1f4684496fd532c1c229968cb9dee44957fcef7ccef59ceda0b362e56bcd78fd3faee5781c623c0bb22b35beabde0664fd30e0e824aba3dd1b0afffc4a3d955ede20cf6a854d52cfd"
iv = codecs.decode("89b8adbfb07345e3598932a09c517441", "hex_codec")
aad = b"test"
def _create_rsa_key(self, client, key_name, **kwargs):
key_ops = kwargs.get("key_operations") or ["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"]
hsm = kwargs.get("hardware_protected") or False
if self.is_live:
time.sleep(2) # to avoid throttling by the service
created_key = client.create_rsa_key(key_name, **kwargs)
kty = "RSA-HSM" if hsm else "RSA"
self._validate_rsa_key_bundle(created_key, client.vault_url, key_name, kty, key_ops)
return created_key
def _create_ec_key(self, client, key_name, **kwargs):
key_curve = kwargs.get("curve") or "P-256"
hsm = kwargs.get("hardware_protected") or False
if self.is_live:
time.sleep(2) # to avoid throttling by the service
created_key = client.create_ec_key(key_name, **kwargs)
key_type = "EC-HSM" if hsm else "EC"
self._validate_ec_key_bundle(key_curve, created_key, client.vault_url, key_name, key_type)
return created_key
def _validate_rsa_key_bundle(self, key_attributes, vault, key_name, kty, key_ops):
prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name])
key = key_attributes.key
kid = key_attributes.id
self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid))
self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty))
self.assertTrue(key.n and key.e, "Bad RSA public material.")
self.assertEqual(
sorted(key_ops), sorted(key.key_ops), "keyOps should be '{}', but is '{}'".format(key_ops, key.key_ops)
)
self.assertTrue(
key_attributes.properties.created_on and key_attributes.properties.updated_on,
"Missing required date attributes.",
)
def _validate_ec_key_bundle(self, key_curve, key_attributes, vault, key_name, kty):
prefix = "/".join(s.strip("/") for s in [vault, "keys", key_name])
key = key_attributes.key
kid = key_attributes.id
self.assertEqual(key_curve, key.crv)
self.assertTrue(kid.index(prefix) == 0, "Key Id should start with '{}', but value is '{}'".format(prefix, kid))
self.assertEqual(key.kty, kty, "kty should by '{}', but is '{}'".format(key, key.kty))
self.assertTrue(
key_attributes.properties.created_on and key_attributes.properties.updated_on,
"Missing required date attributes.",
)
def _import_test_key(self, client, name, hardware_protected=False):
def _to_bytes(hex):
if len(hex) % 2:
hex = "0{}".format(hex)
return codecs.decode(hex, "hex_codec")
key = JsonWebKey(
kty="RSA-HSM" if hardware_protected else "RSA",
key_ops=["encrypt", "decrypt", "sign", "verify", "wrapKey", "unwrapKey"],
n=_to_bytes(
"00a0914d00234ac683b21b4c15d5bed887bdc959c2e57af54ae734e8f00720d775d275e455207e3784ceeb60a50a4655dd72a7a94d271e8ee8f7959a669ca6e775bf0e23badae991b4529d978528b4bd90521d32dd2656796ba82b6bbfc7668c8f5eeb5053747fd199319d29a8440d08f4412d527ff9311eda71825920b47b1c46b11ab3e91d7316407e89c7f340f7b85a34042ce51743b27d4718403d34c7b438af6181be05e4d11eb985d38253d7fe9bf53fc2f1b002d22d2d793fa79a504b6ab42d0492804d7071d727a06cf3a8893aa542b1503f832b296371b6707d4dc6e372f8fe67d8ded1c908fde45ce03bc086a71487fa75e43aa0e0679aa0d20efe35"
),
e=_to_bytes("10001"),
d=_to_bytes(
"627c7d24668148fe2252c7fa649ea8a5a9ed44d75c766cda42b29b660e99404f0e862d4561a6c95af6a83d213e0a2244b03cd28576473215073785fb067f015da19084ade9f475e08b040a9a2c7ba00253bb8125508c9df140b75161d266be347a5e0f6900fe1d8bbf78ccc25eeb37e0c9d188d6e1fc15169ba4fe12276193d77790d2326928bd60d0d01d6ead8d6ac4861abadceec95358fd6689c50a1671a4a936d2376440a41445501da4e74bfb98f823bd19c45b94eb01d98fc0d2f284507f018ebd929b8180dbe6381fdd434bffb7800aaabdd973d55f9eaf9bb88a6ea7b28c2a80231e72de1ad244826d665582c2362761019de2e9f10cb8bcc2625649"
),
p=_to_bytes(
"00d1deac8d68ddd2c1fd52d5999655b2cf1565260de5269e43fd2a85f39280e1708ffff0682166cb6106ee5ea5e9ffd9f98d0becc9ff2cda2febc97259215ad84b9051e563e14a051dce438bc6541a24ac4f014cf9732d36ebfc1e61a00d82cbe412090f7793cfbd4b7605be133dfc3991f7e1bed5786f337de5036fc1e2df4cf3"
),
q=_to_bytes(
"00c3dc66b641a9b73cd833bc439cd34fc6574465ab5b7e8a92d32595a224d56d911e74624225b48c15a670282a51c40d1dad4bc2e9a3c8dab0c76f10052dfb053bc6ed42c65288a8e8bace7a8881184323f94d7db17ea6dfba651218f931a93b8f738f3d8fd3f6ba218d35b96861a0f584b0ab88ddcf446b9815f4d287d83a3237"
),
dp=_to_bytes(
"00c9a159be7265cbbabc9afcc4967eb74fe58a4c4945431902d1142da599b760e03838f8cbd26b64324fea6bdc9338503f459793636e59b5361d1e6951e08ddb089e1b507be952a81fbeaf7e76890ea4f536e25505c3f648b1e88377dfc19b4c304e738dfca07211b792286a392a704d0f444c0a802539110b7f1f121c00cff0a9"
),
dq=_to_bytes(
"00a0bd4c0a3d9f64436a082374b5caf2488bac1568696153a6a5e4cd85d186db31e2f58f024c617d29f37b4e6b54c97a1e25efec59c4d1fd3061ac33509ce8cae5c11f4cd2e83f41a8264f785e78dc0996076ee23dfdfc43d67c463afaa0180c4a718357f9a6f270d542479a0f213870e661fb950abca4a14ca290570ba7983347"
),
qi=_to_bytes(
"009fe7ae42e92bc04fcd5780464bd21d0c8ac0c599f9af020fde6ab0a7e7d1d39902f5d8fb6c614184c4c1b103fb46e94cd10a6c8a40f9991a1f28269f326435b6c50276fda6493353c650a833f724d80c7d522ba16c79f0eb61f672736b68fb8be3243d10943c4ab7028d09e76cfb5892222e38bc4d35585bf35a88cd68c73b07"
),
)
imported_key = client.import_key(name, key)
self._validate_rsa_key_bundle(imported_key, client.vault_url, name, key.kty, key.key_ops)
return imported_key
def _import_symmetric_test_key(self, client, name):
key_material = codecs.decode("e27ed0c84512bbd55b6af434d237c11feba311870f80f2c2e3364260f31c82c8", "hex_codec")
key = JsonWebKey(
kty="oct-HSM",
key_ops=["encrypt", "decrypt", "wrapKey", "unwrapKey"],
k=key_material,
)
imported_key = client.import_key(name, key) # the key material isn't returned by the service
key.kid = imported_key.id
key_vault_key = KeyVaultKey(key_id=imported_key.id, jwk=vars(key)) # create a key containing the material
assert key_vault_key.key.k == key_material
assert key_vault_key.key.kid == imported_key.id == key_vault_key.id
return key_vault_key
@all_api_versions()
@client_setup
def test_ec_key_id(self, key_client, is_hsm, **kwargs):
"""When initialized with a key ID, the client should retrieve the key and perform public operations locally"""
key = self._create_ec_key(key_client, self.get_resource_name("eckey"), hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key.id, api_version=key_client.api_version)
crypto_client._initialize()
assert crypto_client.key_id == key.id
# ensure all remote crypto operations will fail
crypto_client._client = None
crypto_client.verify(SignatureAlgorithm.es256_k, hashlib.sha256(self.plaintext).digest(), self.plaintext)
@all_api_versions()
@client_setup
def test_rsa_key_id(self, key_client, is_hsm, **kwargs):
"""When initialized with a key ID, the client should retrieve the key and perform public operations locally"""
key = self._create_rsa_key(key_client, self.get_resource_name("rsakey"), hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key.id, api_version=key_client.api_version)
crypto_client._initialize()
assert crypto_client.key_id == key.id
# ensure all remote crypto operations will fail
crypto_client._client = None
crypto_client.encrypt(EncryptionAlgorithm.rsa_oaep, self.plaintext)
crypto_client.verify(SignatureAlgorithm.rs256, hashlib.sha256(self.plaintext).digest(), self.plaintext)
crypto_client.wrap_key(KeyWrapAlgorithm.rsa_oaep, self.plaintext)
@no_get()
@client_setup
def test_encrypt_and_decrypt(self, key_client, is_hsm, **kwargs):
key_name = self.get_resource_name("keycrypt")
imported_key = self._import_test_key(key_client, key_name, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(imported_key.id, api_version=key_client.api_version)
result = crypto_client.encrypt(EncryptionAlgorithm.rsa_oaep, self.plaintext)
self.assertEqual(result.key_id, imported_key.id)
result = crypto_client.decrypt(result.algorithm, result.ciphertext)
self.assertEqual(result.key_id, imported_key.id)
self.assertEqual(EncryptionAlgorithm.rsa_oaep, result.algorithm)
self.assertEqual(self.plaintext, result.plaintext)
@no_get()
@client_setup
def test_sign_and_verify(self, key_client, is_hsm, **kwargs):
key_name = self.get_resource_name("keysign")
md = hashlib.sha256()
md.update(self.plaintext)
digest = md.digest()
imported_key = self._import_test_key(key_client, key_name, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(imported_key.id, api_version=key_client.api_version)
result = crypto_client.sign(SignatureAlgorithm.rs256, digest)
self.assertEqual(result.key_id, imported_key.id)
verified = crypto_client.verify(result.algorithm, digest, result.signature)
self.assertEqual(result.key_id, imported_key.id)
self.assertEqual(result.algorithm, SignatureAlgorithm.rs256)
self.assertTrue(verified.is_valid)
@no_get()
@client_setup
def test_wrap_and_unwrap(self, key_client, is_hsm, **kwargs):
key_name = self.get_resource_name("keywrap")
created_key = self._create_rsa_key(key_client, key_name, hardware_protected=is_hsm)
self.assertIsNotNone(created_key)
crypto_client = self.create_crypto_client(created_key.id, api_version=key_client.api_version)
# Wrap a key with the created key, then unwrap it. The wrapped key's bytes should round-trip.
key_bytes = self.plaintext
result = crypto_client.wrap_key(KeyWrapAlgorithm.rsa_oaep, key_bytes)
self.assertEqual(result.key_id, created_key.id)
result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key)
self.assertEqual(key_bytes, result.key)
@only_hsm()
@client_setup
def test_symmetric_encrypt_and_decrypt(self, key_client, **kwargs):
"""Encrypt and decrypt with the service"""
key_name = self.get_resource_name("symmetric-encrypt")
imported_key = self._import_symmetric_test_key(key_client, key_name)
assert imported_key is not None
crypto_client = self.create_crypto_client(imported_key, api_version=key_client.api_version)
# Use 256-bit AES algorithms for the 256-bit key
symmetric_algorithms = [algorithm for algorithm in EncryptionAlgorithm if algorithm.startswith("A256")]
supports_nothing = mock.Mock(supports=mock.Mock(return_value=False))
with mock.patch(crypto_client.__module__ + ".get_local_cryptography_provider", lambda *_: supports_nothing):
for algorithm in symmetric_algorithms:
if algorithm.endswith("GCM"):
encrypt_result = crypto_client.encrypt(
algorithm, self.plaintext, additional_authenticated_data=self.aad
)
assert encrypt_result.key_id == imported_key.id
decrypt_result = crypto_client.decrypt(
encrypt_result.algorithm,
encrypt_result.ciphertext,
iv=encrypt_result.iv,
authentication_tag=encrypt_result.tag,
additional_authenticated_data=self.aad
)
else:
encrypt_result = crypto_client.encrypt(
algorithm, self.plaintext, iv=self.iv, additional_authenticated_data=self.aad
)
assert encrypt_result.key_id == imported_key.id
decrypt_result = crypto_client.decrypt(
encrypt_result.algorithm,
encrypt_result.ciphertext,
iv=encrypt_result.iv,
additional_authenticated_data=self.aad
)
assert decrypt_result.key_id == imported_key.id
assert decrypt_result.algorithm == algorithm
if algorithm.endswith("CBC"):
assert decrypt_result.plaintext.startswith(self.plaintext) # AES-CBC returns a zero-padded plaintext
else:
assert decrypt_result.plaintext == self.plaintext
@only_hsm()
@client_setup
def test_symmetric_wrap_and_unwrap(self, key_client, **kwargs):
key_name = self.get_resource_name("symmetric-kw")
imported_key = self._import_symmetric_test_key(key_client, key_name)
assert imported_key is not None
crypto_client = self.create_crypto_client(imported_key.id, api_version=key_client.api_version)
result = crypto_client.wrap_key(KeyWrapAlgorithm.aes_256, self.plaintext)
assert result.key_id == imported_key.id
result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key)
assert result.key == self.plaintext
@all_api_versions()
@client_setup
def test_encrypt_local(self, key_client, is_hsm, **kwargs):
"""Encrypt locally, decrypt with Key Vault"""
key_name = self.get_resource_name("encrypt-local")
key = self._create_rsa_key(key_client, key_name, size=4096, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
rsa_encrypt_algorithms = [algorithm for algorithm in EncryptionAlgorithm if algorithm.startswith("RSA")]
for encrypt_algorithm in rsa_encrypt_algorithms:
result = crypto_client.encrypt(encrypt_algorithm, self.plaintext)
self.assertEqual(result.key_id, key.id)
result = crypto_client.decrypt(result.algorithm, result.ciphertext)
self.assertEqual(result.plaintext, self.plaintext)
@all_api_versions()
@client_setup
def test_encrypt_local_from_jwk(self, key_client, is_hsm, **kwargs):
"""Encrypt locally, decrypt with Key Vault"""
key_name = self.get_resource_name("encrypt-local")
key = self._create_rsa_key(key_client, key_name, size=4096, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
local_client = CryptographyClient.from_jwk(key.key)
rsa_encrypt_algorithms = [algorithm for algorithm in EncryptionAlgorithm if algorithm.startswith("RSA")]
for encrypt_algorithm in rsa_encrypt_algorithms:
result = local_client.encrypt(encrypt_algorithm, self.plaintext)
self.assertEqual(result.key_id, key.id)
result = crypto_client.decrypt(result.algorithm, result.ciphertext)
self.assertEqual(result.plaintext, self.plaintext)
@only_hsm()
@client_setup
def test_symmetric_encrypt_local(self, key_client, **kwargs):
"""Encrypt locally, decrypt with the service"""
key_name = self.get_resource_name("symmetric-encrypt")
imported_key = self._import_symmetric_test_key(key_client, key_name)
assert imported_key is not None
crypto_client = self.create_crypto_client(imported_key, api_version=key_client.api_version)
# Use 256-bit AES-CBCPAD for the 256-bit key (only AES-CBCPAD is implemented locally)
algorithm = EncryptionAlgorithm.a256_cbcpad
crypto_client._local_provider = get_local_cryptography_provider(imported_key.key)
encrypt_result = crypto_client.encrypt(
algorithm, self.plaintext, iv=self.iv, additional_authenticated_data=self.aad
)
assert encrypt_result.key_id == imported_key.id
crypto_client._local_provider = NoLocalCryptography()
decrypt_result = crypto_client.decrypt(
encrypt_result.algorithm,
encrypt_result.ciphertext,
iv=encrypt_result.iv,
additional_authenticated_data=self.aad
)
assert decrypt_result.key_id == imported_key.id
assert decrypt_result.algorithm == algorithm
assert decrypt_result.plaintext == self.plaintext
@only_hsm()
@client_setup
def test_symmetric_decrypt_local(self, key_client, **kwargs):
"""Encrypt with the service, decrypt locally"""
key_name = self.get_resource_name("symmetric-encrypt")
imported_key = self._import_symmetric_test_key(key_client, key_name)
assert imported_key is not None
crypto_client = self.create_crypto_client(imported_key, api_version=key_client.api_version)
# Use 256-bit AES-CBCPAD for the 256-bit key (only AES-CBCPAD is implemented locally)
algorithm = EncryptionAlgorithm.a256_cbcpad
crypto_client._initialized = True
crypto_client._local_provider = NoLocalCryptography()
encrypt_result = crypto_client.encrypt(
algorithm, self.plaintext, iv=self.iv, additional_authenticated_data=self.aad
)
assert encrypt_result.key_id == imported_key.id
crypto_client._local_provider = get_local_cryptography_provider(imported_key.key)
decrypt_result = crypto_client.decrypt(
encrypt_result.algorithm,
encrypt_result.ciphertext,
iv=encrypt_result.iv,
additional_authenticated_data=self.aad
)
assert decrypt_result.key_id == imported_key.id
assert decrypt_result.algorithm == algorithm
assert decrypt_result.plaintext == self.plaintext
@all_api_versions()
@client_setup
def test_wrap_local(self, key_client, is_hsm, **kwargs):
"""Wrap locally, unwrap with Key Vault"""
key_name = self.get_resource_name("wrap-local")
key = self._create_rsa_key(key_client, key_name, size=4096, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
for wrap_algorithm in (algorithm for algorithm in KeyWrapAlgorithm if algorithm.startswith("RSA")):
result = crypto_client.wrap_key(wrap_algorithm, self.plaintext)
self.assertEqual(result.key_id, key.id)
result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key)
self.assertEqual(result.key, self.plaintext)
@all_api_versions()
@client_setup
def test_wrap_local_from_jwk(self, key_client, is_hsm, **kwargs):
"""Wrap locally, unwrap with Key Vault"""
key_name = self.get_resource_name("wrap-local")
key = self._create_rsa_key(key_client, key_name, size=4096, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
local_client = CryptographyClient.from_jwk(key.key)
for wrap_algorithm in (algorithm for algorithm in KeyWrapAlgorithm if algorithm.startswith("RSA")):
result = local_client.wrap_key(wrap_algorithm, self.plaintext)
self.assertEqual(result.key_id, key.id)
result = crypto_client.unwrap_key(result.algorithm, result.encrypted_key)
self.assertEqual(result.key, self.plaintext)
@all_api_versions()
@client_setup
def test_rsa_verify_local(self, key_client, is_hsm, **kwargs):
"""Sign with Key Vault, verify locally"""
for size in (2048, 3072, 4096):
key_name = self.get_resource_name("rsa-verify-{}".format(size))
key = self._create_rsa_key(key_client, key_name, size=size, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
for signature_algorithm, hash_function in (
(SignatureAlgorithm.ps256, hashlib.sha256),
(SignatureAlgorithm.ps384, hashlib.sha384),
(SignatureAlgorithm.ps512, hashlib.sha512),
(SignatureAlgorithm.rs256, hashlib.sha256),
(SignatureAlgorithm.rs384, hashlib.sha384),
(SignatureAlgorithm.rs512, hashlib.sha512),
):
digest = hash_function(self.plaintext).digest()
result = crypto_client.sign(signature_algorithm, digest)
self.assertEqual(result.key_id, key.id)
result = crypto_client.verify(result.algorithm, digest, result.signature)
self.assertTrue(result.is_valid)
@all_api_versions()
@client_setup
def test_rsa_verify_local_from_jwk(self, key_client, is_hsm, **kwargs):
"""Sign with Key Vault, verify locally"""
for size in (2048, 3072, 4096):
key_name = self.get_resource_name("rsa-verify-{}".format(size))
key = self._create_rsa_key(key_client, key_name, size=size, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
local_client = CryptographyClient.from_jwk(key.key)
for signature_algorithm, hash_function in (
(SignatureAlgorithm.ps256, hashlib.sha256),
(SignatureAlgorithm.ps384, hashlib.sha384),
(SignatureAlgorithm.ps512, hashlib.sha512),
(SignatureAlgorithm.rs256, hashlib.sha256),
(SignatureAlgorithm.rs384, hashlib.sha384),
(SignatureAlgorithm.rs512, hashlib.sha512),
):
digest = hash_function(self.plaintext).digest()
result = crypto_client.sign(signature_algorithm, digest)
self.assertEqual(result.key_id, key.id)
result = local_client.verify(result.algorithm, digest, result.signature)
self.assertTrue(result.is_valid)
@all_api_versions()
@client_setup
def test_ec_verify_local(self, key_client, is_hsm, **kwargs):
"""Sign with Key Vault, verify locally"""
matrix = {
KeyCurveName.p_256: (SignatureAlgorithm.es256, hashlib.sha256),
KeyCurveName.p_256_k: (SignatureAlgorithm.es256_k, hashlib.sha256),
KeyCurveName.p_384: (SignatureAlgorithm.es384, hashlib.sha384),
KeyCurveName.p_521: (SignatureAlgorithm.es512, hashlib.sha512),
}
for curve, (signature_algorithm, hash_function) in sorted(matrix.items()):
key_name = self.get_resource_name("ec-verify-{}".format(curve.value))
key = self._create_ec_key(key_client, key_name, curve=curve, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
digest = hash_function(self.plaintext).digest()
result = crypto_client.sign(signature_algorithm, digest)
self.assertEqual(result.key_id, key.id)
result = crypto_client.verify(result.algorithm, digest, result.signature)
self.assertTrue(result.is_valid)
@all_api_versions()
@client_setup
def test_ec_verify_local_from_jwk(self, key_client, is_hsm, **kwargs):
"""Sign with Key Vault, verify locally"""
matrix = {
KeyCurveName.p_256: (SignatureAlgorithm.es256, hashlib.sha256),
KeyCurveName.p_256_k: (SignatureAlgorithm.es256_k, hashlib.sha256),
KeyCurveName.p_384: (SignatureAlgorithm.es384, hashlib.sha384),
KeyCurveName.p_521: (SignatureAlgorithm.es512, hashlib.sha512),
}
for curve, (signature_algorithm, hash_function) in sorted(matrix.items()):
key_name = self.get_resource_name("ec-verify-{}".format(curve.value))
key = self._create_ec_key(key_client, key_name, curve=curve, hardware_protected=is_hsm)
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
local_client = CryptographyClient.from_jwk(key.key)
digest = hash_function(self.plaintext).digest()
result = crypto_client.sign(signature_algorithm, digest)
self.assertEqual(result.key_id, key.id)
result = local_client.verify(result.algorithm, digest, result.signature)
self.assertTrue(result.is_valid)
@all_api_versions()
@client_setup
def test_local_validity_period_enforcement(self, key_client, is_hsm, **kwargs):
"""Local crypto operations should respect a key's nbf and exp properties"""
def test_operations(key, expected_error_substrings, encrypt_algorithms, wrap_algorithms):
crypto_client = self.create_crypto_client(key, api_version=key_client.api_version)
for algorithm in encrypt_algorithms:
with pytest.raises(ValueError) as ex:
crypto_client.encrypt(algorithm, self.plaintext)
for substring in expected_error_substrings:
assert substring in str(ex.value)
for algorithm in wrap_algorithms:
with pytest.raises(ValueError) as ex:
crypto_client.wrap_key(algorithm, self.plaintext)
for substring in expected_error_substrings:
assert substring in str(ex.value)
# operations should not succeed with a key whose nbf is in the future
the_year_3000 = datetime(3000, 1, 1, tzinfo=_UTC)
rsa_wrap_algorithms = [algorithm for algorithm in KeyWrapAlgorithm if algorithm.startswith("RSA")]
rsa_encryption_algorithms = [algorithm for algorithm in EncryptionAlgorithm if algorithm.startswith("RSA")]
key_name = self.get_resource_name("rsa-not-yet-valid")
not_yet_valid_key = self._create_rsa_key(
key_client, key_name, not_before=the_year_3000, hardware_protected=is_hsm
)
test_operations(not_yet_valid_key, [str(the_year_3000)], rsa_encryption_algorithms, rsa_wrap_algorithms)
# nor should they succeed with a key whose exp has passed
the_year_2000 = datetime(2000, 1, 1, tzinfo=_UTC)
key_name = self.get_resource_name("rsa-expired")
expired_key = self._create_rsa_key(key_client, key_name, expires_on=the_year_2000, hardware_protected=is_hsm)
test_operations(expired_key, [str(the_year_2000)], rsa_encryption_algorithms, rsa_wrap_algorithms)
# when exp and nbf are set, error messages should contain both
the_year_3001 = datetime(3001, 1, 1, tzinfo=_UTC)
key_name = self.get_resource_name("rsa-valid")
valid_key = self._create_rsa_key(
key_client, key_name, not_before=the_year_3000, expires_on=the_year_3001, hardware_protected=is_hsm
)
test_operations(
valid_key, (str(the_year_3000), str(the_year_3001)), rsa_encryption_algorithms, rsa_wrap_algorithms
)
def test_custom_hook_policy():
class CustomHookPolicy(SansIOHTTPPolicy):
pass
client = CryptographyClient("https://localhost/fake/key/version", object(), custom_hook_policy=CustomHookPolicy())
assert isinstance(client._client._config.custom_hook_policy, CustomHookPolicy)
def test_initialization_given_key():
"""If the client is given key material, it should not attempt to get this from the vault"""
mock_client = mock.Mock()
key = mock.Mock(spec=KeyVaultKey, id="https://localhost/fake/key/version")
client = CryptographyClient(key, mock.Mock())
client._client = mock_client
with mock.patch(CryptographyClient.__module__ + ".get_local_cryptography_provider") as get_provider:
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
get_provider.assert_called_once_with(key.key)
assert mock_client.get_key.call_count == 0
def test_initialization_get_key_successful():
"""If the client is able to get key material, it shouldn't do so again"""
key_id = "https://localhost/fake/key/version"
mock_key = mock.Mock()
mock_key.key.kid = key_id
mock_client = mock.Mock()
mock_client.get_key.return_value = mock_key
client = CryptographyClient(key_id, mock.Mock())
client._client = mock_client
assert mock_client.get_key.call_count == 0
with mock.patch(CryptographyClient.__module__ + ".get_local_cryptography_provider") as get_provider:
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
args, _ = get_provider.call_args
assert len(args) == 1 and isinstance(args[0], JsonWebKey) and args[0].kid == key_id
for _ in range(3):
assert mock_client.get_key.call_count == 1
assert get_provider.call_count == 1
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
def test_initialization_forbidden_to_get_key():
"""If the client is forbidden to get key material, it should try to do so exactly once"""
mock_client = mock.Mock()
mock_client.get_key.side_effect = HttpResponseError(response=mock.Mock(status_code=403))
client = CryptographyClient("https://localhost/fake/key/version", mock.Mock())
client._client = mock_client
assert mock_client.get_key.call_count == 0
for _ in range(3):
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
assert mock_client.get_key.call_count == 1
def test_initialization_transient_failure_getting_key():
"""If the client is not forbidden to get key material, it should retry after failing to do so"""
mock_client = mock.Mock()
mock_client.get_key.side_effect = HttpResponseError(response=mock.Mock(status_code=500))
client = CryptographyClient("https://localhost/fake/key/version", mock.Mock())
client._client = mock_client
for i in range(3):
assert mock_client.get_key.call_count == i
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
def test_calls_service_for_operations_unsupported_locally():
"""When an operation can't be performed locally, the client should request Key Vault perform it"""
mock_client = mock.Mock()
key = mock.Mock(spec=KeyVaultKey, id="https://localhost/fake/key/version")
client = CryptographyClient(key, mock.Mock())
client._client = mock_client
supports_nothing = mock.Mock(supports=mock.Mock(return_value=False))
with mock.patch(CryptographyClient.__module__ + ".get_local_cryptography_provider", lambda *_: supports_nothing):
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert mock_client.decrypt.call_count == 1
assert supports_nothing.decrypt.call_count == 0
client.encrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert mock_client.encrypt.call_count == 1
assert supports_nothing.encrypt.call_count == 0
client.sign(SignatureAlgorithm.rs256, b"...")
assert mock_client.sign.call_count == 1
assert supports_nothing.sign.call_count == 0
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
assert mock_client.verify.call_count == 1
assert supports_nothing.verify.call_count == 0
client.unwrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert mock_client.unwrap_key.call_count == 1
assert supports_nothing.unwrap_key.call_count == 0
client.wrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert mock_client.wrap_key.call_count == 1
assert supports_nothing.wrap_key.call_count == 0
def test_local_only_mode_no_service_calls():
"""A local-only CryptographyClient shouldn't call the service if an operation can't be performed locally"""
mock_client = mock.Mock()
jwk = JsonWebKey(kty="RSA", key_ops=[], n=b"10011", e=b"10001")
client = CryptographyClient.from_jwk(jwk=jwk)
client._client = mock_client
with pytest.raises(NotImplementedError):
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert mock_client.decrypt.call_count == 0
with pytest.raises(NotImplementedError):
client.encrypt(EncryptionAlgorithm.a256_gcm, b"...")
assert mock_client.encrypt.call_count == 0
with pytest.raises(NotImplementedError):
client.sign(SignatureAlgorithm.rs256, b"...")
assert mock_client.sign.call_count == 0
with pytest.raises(NotImplementedError):
client.verify(SignatureAlgorithm.es256, b"...", b"...")
assert mock_client.verify.call_count == 0
with pytest.raises(NotImplementedError):
client.unwrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert mock_client.unwrap_key.call_count == 0
with pytest.raises(NotImplementedError):
client.wrap_key(KeyWrapAlgorithm.aes_256, b"...")
assert mock_client.wrap_key.call_count == 0
def test_local_only_mode_raise():
"""A local-only CryptographyClient should raise an exception if an operation can't be performed locally"""
jwk = {"kty":"RSA", "key_ops":["decrypt", "verify", "unwrapKey"], "n":b"10011", "e":b"10001"}
client = CryptographyClient.from_jwk(jwk=jwk)
# Algorithm not supported locally
with pytest.raises(NotImplementedError) as ex:
client.decrypt(EncryptionAlgorithm.a256_gcm, b"...", iv=b"...", authentication_tag=b"...")
assert EncryptionAlgorithm.a256_gcm in str(ex.value)
assert KeyOperation.decrypt in str(ex.value)
# Operation not included in JWK permissions
with pytest.raises(AzureError) as ex:
client.encrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert KeyOperation.encrypt in str(ex.value)
# Algorithm not supported locally
with pytest.raises(NotImplementedError) as ex:
client.verify(SignatureAlgorithm.es256, b"...", b"...")
assert SignatureAlgorithm.es256 in str(ex.value)
assert KeyOperation.verify in str(ex.value)
# Algorithm not supported locally, and operation not included in JWK permissions
with pytest.raises(NotImplementedError) as ex:
client.sign(SignatureAlgorithm.rs256, b"...")
assert SignatureAlgorithm.rs256 in str(ex.value)
assert KeyOperation.sign in str(ex.value)
# Algorithm not supported locally
with pytest.raises(NotImplementedError) as ex:
client.unwrap_key(KeyWrapAlgorithm.aes_256, b"...")
assert KeyWrapAlgorithm.aes_256 in str(ex.value)
assert KeyOperation.unwrap_key in str(ex.value)
# Operation not included in JWK permissions
with pytest.raises(AzureError) as ex:
client.wrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert KeyOperation.wrap_key in str(ex.value)
def test_prefers_local_provider():
"""The client should complete operations locally whenever possible"""
mock_client = mock.Mock()
key = mock.Mock(
spec=KeyVaultKey,
id="https://localhost/fake/key/version",
properties=mock.Mock(
not_before=datetime(2000, 1, 1, tzinfo=_UTC), expires_on=datetime(3000, 1, 1, tzinfo=_UTC)
),
)
client = CryptographyClient(key, mock.Mock())
client._client = mock_client
supports_everything = mock.Mock(supports=mock.Mock(return_value=True))
with mock.patch(CryptographyClient.__module__ + ".get_local_cryptography_provider", lambda *_: supports_everything):
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert mock_client.decrypt.call_count == 0
assert supports_everything.decrypt.call_count == 1
client.encrypt(EncryptionAlgorithm.rsa_oaep, b"...")
assert mock_client.encrypt.call_count == 0
assert supports_everything.encrypt.call_count == 1
client.sign(SignatureAlgorithm.rs256, b"...")
assert mock_client.sign.call_count == 0
assert supports_everything.sign.call_count == 1
client.verify(SignatureAlgorithm.rs256, b"...", b"...")
assert mock_client.verify.call_count == 0
assert supports_everything.verify.call_count == 1
client.unwrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert mock_client.unwrap_key.call_count == 0
assert supports_everything.unwrap_key.call_count == 1
client.wrap_key(KeyWrapAlgorithm.rsa_oaep, b"...")
assert mock_client.wrap_key.call_count == 0
assert supports_everything.wrap_key.call_count == 1
def test_aes_cbc_key_size_validation():
"""The client should raise an error when the key is an inappropriate size for the specified algorithm"""
jwk = JsonWebKey(kty="oct-HSM", key_ops=["encrypt", "decrypt"], k=os.urandom(64))
iv = os.urandom(16)
client = CryptographyClient.from_jwk(jwk=jwk)
with pytest.raises(AzureError) as ex:
client.encrypt(EncryptionAlgorithm.a128_cbcpad, b"...", iv=iv) # requires 16-byte key
assert "key size" in str(ex.value).lower()
with pytest.raises(AzureError) as ex:
client.encrypt(EncryptionAlgorithm.a192_cbcpad, b"...", iv=iv) # requires 24-byte key
assert "key size" in str(ex.value).lower()
with pytest.raises(AzureError) as ex:
client.encrypt(EncryptionAlgorithm.a256_cbcpad, b"...", iv=iv) # requires 32-byte key
assert "key size" in str(ex.value).lower()
def test_aes_cbc_iv_validation():
"""The client should raise an error when an iv is not provided"""
jwk = JsonWebKey(kty="oct-HSM", key_ops=["encrypt", "decrypt"], k=os.urandom(32))
client = CryptographyClient.from_jwk(jwk=jwk)
with pytest.raises(ValueError) as ex:
client.encrypt(EncryptionAlgorithm.a256_cbcpad, b"...")
assert "iv" in str(ex.value).lower()
def test_encrypt_argument_validation():
"""The client should raise an error when arguments don't work with the specified algorithm"""
mock_client = mock.Mock()
key = mock.Mock(
spec=KeyVaultKey,
id="https://localhost/fake/key/version",
properties=mock.Mock(
not_before=datetime(2000, 1, 1, tzinfo=_UTC), expires_on=datetime(3000, 1, 1, tzinfo=_UTC)
),
)
client = CryptographyClient(key, mock.Mock())
client._client = mock_client
with pytest.raises(ValueError) as ex:
client.encrypt(EncryptionAlgorithm.rsa_oaep, b"...", iv=b"...")
assert "iv" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.encrypt(EncryptionAlgorithm.rsa_oaep, b"...", additional_authenticated_data=b"...")
assert "additional_authenticated_data" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.encrypt(EncryptionAlgorithm.a256_cbc, b"...")
assert "iv" in str(ex.value) and "required" in str(ex.value)
def test_decrypt_argument_validation():
mock_client = mock.Mock()
key = mock.Mock(
spec=KeyVaultKey,
id="https://localhost/fake/key/version",
properties=mock.Mock(
not_before=datetime(2000, 1, 1, tzinfo=_UTC), expires_on=datetime(3000, 1, 1, tzinfo=_UTC)
),
)
client = CryptographyClient(key, mock.Mock())
client._client = mock_client
with pytest.raises(ValueError) as ex:
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...", iv=b"...")
assert "iv" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...", additional_authenticated_data=b"...")
assert "additional_authenticated_data" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.decrypt(EncryptionAlgorithm.rsa_oaep, b"...", authentication_tag=b"...")
assert "authentication_tag" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.decrypt(EncryptionAlgorithm.a128_gcm, b"...", iv=b"...")
assert "authentication_tag" in str(ex.value) and "required" in str(ex.value)
with pytest.raises(ValueError) as ex:
client.decrypt(EncryptionAlgorithm.a192_cbcpad, b"...")
assert "iv" in str(ex.value) and "required" in str(ex.value)
| Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-keys/tests/test_crypto_client.py | Python | mit | 41,908 | 0.003508 |
# Copyright 2020 The Google Authors. All Rights Reserved.
#
# Licensed under the MIT License (the "License");
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
"""Run tests with LIBSVM dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
from absl import app
from absl import flags
import numpy as np
import sklearn.datasets
from sklearn.model_selection import train_test_split
from agbt import AGBT
from agbt_b import AGBTB
import functional as F
from gbt import GBT
from tree import Dataset
from tensorflow.python.platform import gfile
FLAGS = flags.FLAGS
flags.DEFINE_string("data_folder", None, "The directory of datasets.")
flags.DEFINE_enum("dataset_name", "all_datasets",
["all_datasets", "a1a", "w1a", "housing"],
("The name of instances."
"`all_datasets` means all of the instances in the folder."))
flags.DEFINE_enum("loss", "L2Loss", ["L2Loss", "LogisticLoss"],
"The loss function.")
flags.DEFINE_enum(
"method", "AGBT", ["GBT", "AGBT", "AGBTB"],
("The method to use. GBT is the standard gradient boosted tree. AGBT is our"
"proposed method and AGBTB is the method proposed by Biau et al."))
flags.DEFINE_integer(
"early_stopping_rounds", 100000,
("Stop the algorithm if the validation loss does not improve after this"
"number of iterations."))
flags.DEFINE_float(
"z_shrinkage_parameter", 0.1,
"The shrinkage parameter in the z-update in accelerated method.")
flags.DEFINE_integer("max_depth", 3, "Maximal depth of a tree.")
flags.DEFINE_integer("num_trees", 20, "Number of boosting iterations.")
flags.DEFINE_float("min_split_gain", 0.1, "Minimal gain for splitting a leaf.")
flags.DEFINE_float("learning_rate", 0.3, "Learning rate.")
flags.DEFINE_float("regularizer_const", 1, "Regularizer constant.")
flags.DEFINE_boolean("use_hessian", False, "Whether to use Hessian.")
TEST_SIZE = 0.2
RANDOM_STATE = 40
LOSS = {"L2Loss": F.L2Loss, "LogisticLoss": F.LogisticLoss}
def SetupData(data_folder, dataset_name):
path = os.path.join(data_folder, dataset_name + ".txt")
data = sklearn.datasets.load_svmlight_file(gfile.Open(path, mode="rb"))
x = np.asarray(data[0].todense())
y = np.array(data[1])
return train_test_split(x, y, test_size=TEST_SIZE, random_state=RANDOM_STATE)
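# SetupData returns (x_train, x_test, y_train, y_test), the train_test_split
# ordering that main() unpacks below.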
def main(argv):
del argv
if FLAGS.data_folder is None:
raise ValueError("Directory with downloaded datasets must be provided.")
if FLAGS.dataset_name == "all_datasets":
names = ["a1a", "w1a", "housing"]
else:
names = [FLAGS.dataset_name]
for name in names:
x_train, x_test, y_train, y_test = SetupData(FLAGS.data_folder, name)
train_data = Dataset(x_train, y_train)
test_data = Dataset(x_test, y_test)
GBTParams = collections.namedtuple("GBTParams", [
"regularizer_const", "min_split_gain", "max_depth", "learning_rate",
"num_trees", "early_stopping_rounds", "loss", "use_hessian",
"z_shrinkage_parameter"
])
params = GBTParams(
regularizer_const=FLAGS.regularizer_const,
min_split_gain=FLAGS.min_split_gain,
max_depth=FLAGS.max_depth,
learning_rate=FLAGS.learning_rate,
num_trees=FLAGS.num_trees,
early_stopping_rounds=FLAGS.early_stopping_rounds,
loss=FLAGS.loss,
use_hessian=FLAGS.use_hessian,
z_shrinkage_parameter=FLAGS.z_shrinkage_parameter)
if FLAGS.method == "GBT":
print("Start training using GBT...")
method = GBT(params)
elif FLAGS.method == "AGBT":
print("Start training using AGBT...")
method = AGBT(params)
elif FLAGS.method == "AGBTB":
print("Start training using AGBTB...")
method = AGBTB(params)
method.train(train_data, valid_set=test_data)
print("Start predicting...")
y_pred = []
for x in x_test:
y_pred.append(method.predict(x, num_iteration=method.best_iteration))
if params.loss == "L2Loss":
loss = F.L2Loss(params.use_hessian)
elif params.loss == "LogisticLoss":
loss = F.LogisticLoss(params.use_hessian)
print("The mean loss of prediction is:",
np.mean(loss.loss_value(np.array(y_pred), np.array(y_test))))
if __name__ == "__main__":
app.run(main)
| google-research/accelerated_gbm | solve_libsvm_instances.py | Python | mit | 4,858 | 0.004323 |
import __builtin__
import etcd
from etcd import Client
import importlib
import inspect
import maps
from mock import MagicMock
from mock import patch
import os
import pkgutil
import pytest
import yaml
from tendrl.commons import objects
import tendrl.commons.objects.node_context as node
from tendrl.commons import TendrlNS
from tendrl.commons.utils import etcd_utils
@patch.object(etcd, "Client")
@patch.object(Client, "read")
@patch.object(node.NodeContext, '_get_node_id')
@patch.object(etcd_utils, 'read')
@patch.object(node.NodeContext, 'load')
def init(patch_node_load,
patch_etcd_utils_read,
patch_get_node_id,
patch_read,
patch_client):
patch_get_node_id.return_value = 1
patch_read.return_value = etcd.Client()
patch_client.return_value = etcd.Client()
setattr(__builtin__, "NS", maps.NamedDict())
setattr(NS, "_int", maps.NamedDict())
NS._int.etcd_kwargs = {
'port': 1,
'host': 2,
'allow_reconnect': True}
NS._int.client = etcd.Client(**NS._int.etcd_kwargs)
NS["config"] = maps.NamedDict()
NS.config["data"] = maps.NamedDict()
NS.config.data['tags'] = "test"
patch_etcd_utils_read.return_value = maps.NamedDict(
value='{"status": "UP",'
'"pkey": "tendrl-node-test",'
'"node_id": "test_node_id",'
'"ipv4_addr": "test_ip",'
'"tags": "[\\"my_tag\\"]",'
'"sync_status": "done",'
'"locked_by": "fd",'
'"fqdn": "tendrl-node-test",'
'"last_sync": "date"}')
patch_node_load.return_value = node.NodeContext
tendrlNS = TendrlNS()
return tendrlNS
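# init() above wires a mocked etcd client into a fresh global NS and returns a
# TendrlNS instance; the tests below call it to start from a clean namespace.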
def test_constructor():
with patch.object(TendrlNS, 'setup_common_objects') as \
mocked_method:
mocked_method.return_value = None
tendrlNS = TendrlNS()
tendrlNS = init()
# Default Parameter Testing
assert tendrlNS.ns_name == "tendrl"
assert tendrlNS.ns_src == "tendrl.commons"
    # Check for existence and the right data type
assert isinstance(NS, maps.NamedDict)
# Testing _list_modules_in_package_path
def test_list_modules_in_package_path():
tendrlNS = init()
modules = [
('alert',
'tendrl.commons.objects.alert'),
('block_device',
'tendrl.commons.objects.block_device'),
('cluster',
'tendrl.commons.objects.cluster'),
('cluster_alert',
'tendrl.commons.objects.cluster_alert'),
('cluster_alert_counters',
'tendrl.commons.objects.cluster_alert_counters'),
('cluster_node_alert_counters',
'tendrl.commons.objects.cluster_node_alert_counters'),
('cluster_node_context',
'tendrl.commons.objects.cluster_node_context'),
('cluster_tendrl_context',
'tendrl.commons.objects.cluster_tendrl_context'),
('cpu', 'tendrl.commons.objects.cpu'),
('definition', 'tendrl.commons.objects.definition'),
('detected_cluster', 'tendrl.commons.objects.detected_cluster'),
('disk', 'tendrl.commons.objects.disk'),
('geo_replication_session',
'tendrl.commons.objects.geo_replication_session'),
('global_details',
'tendrl.commons.objects.global_details'),
('gluster_brick', 'tendrl.commons.objects.gluster_brick'),
('gluster_volume', 'tendrl.commons.objects.gluster_volume'),
('gluster_peer', 'tendrl.commons.objects.gluster_peer'),
('job', 'tendrl.commons.objects.job'),
('memory', 'tendrl.commons.objects.memory'),
('node', 'tendrl.commons.objects.node'),
('node_alert',
'tendrl.commons.objects.node_alert'),
('node_context', 'tendrl.commons.objects.node_context'),
('node_network', 'tendrl.commons.objects.node_network'),
('notification_only_alert',
'tendrl.commons.objects.notification_only_alert'),
('os', 'tendrl.commons.objects.os'),
('platform', 'tendrl.commons.objects.platform'),
('service', 'tendrl.commons.objects.service'),
('tendrl_context', 'tendrl.commons.objects.tendrl_context'),
('virtual_disk', 'tendrl.commons.objects.virtual_disk')
]
    ns_objects_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)).rsplit('/', 1)[0],
        "objects")
ns_objects_prefix = "tendrl.commons.objects."
ret = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
# TO-DISCUSS : modules is hard coded and might change in future
if len(ret) != len(modules):
raise AssertionError()
ret = tendrlNS._list_modules_in_package_path("test", "test")
assert len(ret) == 0
# Testing _register_subclasses_to_ns
def test_register_subclasses_to_ns(monkeypatch):
tendrlNS = init()
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
assert len(getattr(NS.tendrl, "flows")) > 0
ns_objects_path = os.path.join(
os.path.dirname(
os.path.abspath(__file__)).rsplit(
'/', 1)[0], "objects")
ns_objects_prefix = "tendrl.commons.objects."
modules = tendrlNS._list_modules_in_package_path(ns_objects_path,
ns_objects_prefix)
for mode_name, mod_cls in modules:
assert hasattr(NS.tendrl.objects, mode_name.title().replace('_', '')) \
is True
def list_package(self_obj, package_path, prefix):
if "flows" in prefix:
return [
('ImportCluster', 'tendrl.commons.flows.import_cluster'),
('UnmanageCluster', 'tendrl.commons.flows.unmanage_cluster')
]
else:
modules = []
for importer, name, ispkg in pkgutil.walk_packages(
path=[package_path]):
modules.append((name, prefix + name))
return modules
monkeypatch.setattr(TendrlNS, '_list_modules_in_package_path',
list_package)
tendrlNS._register_subclasses_to_ns()
assert len(getattr(NS.tendrl, "objects")) > 0
# Testing _add_object
def test_add_object():
tendrlNS = init()
obj_name = "test_obj"
obj = importlib.import_module(
"tendrl.commons.objects.cluster_node_context")
current_ns = tendrlNS._get_ns()
obj_cls = ""
for obj_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS._add_object(obj_name, obj_cls[1])
break
assert isinstance(getattr(current_ns.objects, "_test_obj")['atoms'],
maps.NamedDict)
assert isinstance(getattr(current_ns.objects, "_test_obj")['flows'],
maps.NamedDict)
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(_Service=maps.NamedDict(
atoms=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
with patch.object(TendrlNS, "_get_ns") as mock_add_obj:
mock_add_obj.return_value = maps.NamedDict(
objects=maps.NamedDict(
_Service=maps.NamedDict(
flows=maps.NamedDict())))
tendrlNS._add_object("Service", obj_cls[1])
# Testing _get_objects
def test_get_objects():
path = os.path.join(os.path.dirname(
os.path.dirname(os.path.abspath(__file__))), "objects")
objects_list = [d.title().replace('_', '') for d in os.listdir(path)
if os.path.isdir(os.path.join(path, d))]
tendrlNS = init()
ret = tendrlNS._get_objects()
assert isinstance(objects_list, list)
assert ret is not None
# TO-DISCUSS : object_list is hard coded and might change in future
assert set(ret) == set(objects_list)
# Testing _get_object
def test_get_object():
tendrlNS = init()
ret = tendrlNS._get_object("NodeNetwork")
assert (inspect.isclass(ret)) is True
assert (issubclass(ret, objects.BaseObject)) is True
path = os.path.join(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))), "objects",
"definition")
with open(os.path.join(path, "master.yaml"), 'r') as f:
definition = yaml.safe_load(f)
def_obj = definition["namespace.tendrl"]["objects"]["NodeNetwork"]["attrs"]
# Creating instance of the class
temp_instance = ret()
# Comparing attributes of object from actual definition
for k, v in def_obj.items():
assert hasattr(temp_instance, k.lower())
# Testing _get_ns():
def test_get_ns():
tendrlNS = init()
assert isinstance(tendrlNS._get_ns(), maps.NamedDict) is True
tendrlNS.ns_name = "integrations"
tendrlNS._create_ns()
assert isinstance(tendrlNS._get_ns(), maps.NamedDict) is True
# Testing get_obj_definition
def test_get_obj_definition():
tendrlNS = init()
ret = tendrlNS.get_obj_definition("Service")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "attrs") is True
NS["compiled_definitions"] = tendrlNS.current_ns.definitions
ret = tendrlNS.get_obj_definition("Service")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "attrs") is True
# Testing get_obj_flow_definition
def test_get_obj_flow_definition():
tendrlNS = init()
with pytest.raises(KeyError):
tendrlNS.get_obj_flow_definition("Service", "test")
# Testing get_flow_definiiton()
def test_get_flow_definition():
tendrlNS = init()
with pytest.raises(KeyError):
tendrlNS.get_flow_definition("BaseFlow")
NS["compiled_definitions"] = tendrlNS.current_ns.definitions
tendrlNS.get_flow_definition("ImportCluster")
# Testing get_atom_definition
def test_get_atom_definition():
tendrlNS = init()
ret = tendrlNS.get_atom_definition("Service", "CheckServiceStatus")
assert ret is not None
assert isinstance(ret, maps.NamedDict) is True
assert hasattr(ret, "inputs") is True
# Testing add_atom
def test_add_atom():
tendrlNS = init()
obj_name = "Service"
current_ns = tendrlNS._get_ns()
obj = importlib.import_module(
"tendrl.commons.objects.service.atoms.check_service_status")
atom_class = ""
for atom_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS._add_atom(obj_name, "test_atom", atom_cls[1])
atom_class = atom_cls[1]
break
assert hasattr(current_ns.objects["_Service"]['atoms'], "test_atom")
assert current_ns.objects["_Service"]['atoms']["test_atom"] == atom_class
# Testing setup_definitions
def test_setup_definitions():
tendrlNS = init()
tendrlNS.setup_definitions()
assert tendrlNS.current_ns is not None
assert isinstance(tendrlNS.current_ns, maps.NamedDict) is True
# Testing add_flow
def test_add_flow():
tendrlNS = init()
flow_class = ""
flow = importlib.import_module("tendrl.commons.flows.import_cluster")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_flow("test_flow", flow_cls[1])
flow_class = flow_cls[1]
break
current_ns = tendrlNS._get_ns()
assert hasattr(current_ns.flows, "test_flow") is True
assert current_ns.flows["test_flow"] is flow_class
# Testing get_flow
def test_get_flow():
tendrlNS = init()
ret = tendrlNS.get_flow("ImportCluster")
assert ret is not None
# Testing add_obj_flow
def test_add_obj_flow():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS.get_obj_flow("Node", "AtomExecutionFailedError")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_obj_flow
def test_get_obj_flow():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS.get_obj_flow("Node", "AtomExecutionFailedError")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_obj_flows
def test_get_obj_flows():
tendrlNS = init()
flow = importlib.import_module("tendrl.commons.flows")
for flow_cls in inspect.getmembers(flow, inspect.isclass):
tendrlNS._add_obj_flow("Node", "AtomExecutionFailedError", flow_cls[1])
break
ret = tendrlNS._get_obj_flows("Node")
assert ret is not None
assert isinstance(ret, maps.NamedDict)
# Testing get_atom
def test_get_atom():
tendrlNS = init()
ret = tendrlNS.get_atom("Node", "Cmd")
assert ret is not None
assert (inspect.isclass(ret)) is True
# Testing get_atoms
def test_get_atoms():
tendrlNS = init()
ret = tendrlNS._get_atoms("Node")
assert ret is not None
assert isinstance(ret, maps.NamedDict)
# Testing _create_ns()
def test_create_ns():
tendrlNS = init()
assert getattr(NS, "tendrl")
tendrlNS.ns_name = "integrations"
tendrlNS._create_ns()
assert getattr(NS, "integrations")
tendrlNS._create_ns()
# Testing_validate_ns_flow_definitions
def test_validate_ns_flow_definitions():
tendrlNS = init()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["flows"]["test"] = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
tendrlNS.current_ns.flows["Test"] = "Test Flow"
with pytest.raises(Exception):
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
tendrlNS.current_ns.flows = None
defs = maps.NamedDict()
tendrlNS._validate_ns_flow_definitions(raw_ns, defs)
# Testing _validate_ns_obj_definitions
def test_validate_ns_obj_definitions():
tendrlNS = init()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
defs_temp = defs
defs_temp["objects"]["TestObject"] = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs_temp)
tendrlNS.current_ns.objects["_Node"]["atoms"]["Test"] = \
"Test atom class"
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS_temp = init()
tendrlNS_temp.current_ns.objects["_Node"]["flows"]["Test"] = \
"Test flow class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS.current_ns.objects["Test"] = "Test Object"
with pytest.raises(Exception):
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
tendrlNS_temp = init()
defs = tendrlNS_temp.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["objects"]["Node"]["atoms"]["Test"] = \
"Test atom class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
defs = tendrlNS_temp.current_ns.definitions.get_parsed_defs()[raw_ns]
defs["objects"]["Node"]["flows"] = maps.NamedDict()
defs["objects"]["Node"]["flows"]["Test"] = "Test flow class"
with pytest.raises(Exception):
tendrlNS_temp._validate_ns_obj_definitions(raw_ns, defs)
defs = maps.NamedDict()
tendrlNS.current_ns.objects = None
tendrlNS._validate_ns_obj_definitions(raw_ns, defs)
# Testing _validate_ns_definitions
def test_validate_ns_definitions():
tendrlNS = init()
tendrlNS._validate_ns_obj_definitions = MagicMock(return_value=None)
tendrlNS._validate_ns_definitions()
raw_ns = "namespace.tendrl"
defs = tendrlNS.current_ns.definitions.get_parsed_defs()[raw_ns]
tendrlNS._validate_ns_obj_definitions.assert_called_with(raw_ns, defs)
tendrlNS._validate_ns_flow_definitions = MagicMock(return_value=None)
tendrlNS._validate_ns_definitions()
tendrlNS._validate_ns_flow_definitions.assert_called_with(raw_ns, defs)
tendrlNS.current_ns.definitions = maps.NamedDict()
with pytest.raises(Exception):
tendrlNS._validate_ns_definitions()
# Testing setup_common_objects
def test_setup_common_objects(monkeypatch):
tendrlNS = init()
obj = importlib.import_module("tendrl.commons.tests.fixtures.config")
for obj_cls in inspect.getmembers(obj, inspect.isclass):
tendrlNS.current_ns.objects["Config"] = obj_cls[1]
with patch.object(etcd, "Client", return_value=etcd.Client()) as client:
tendrlNS.current_ns.objects.pop("NodeContext")
tendrlNS.setup_common_objects()
assert NS._int.client is not None
assert NS._int.wclient is not None
etcd.Client.assert_called_with(host=1, port=1)
tendrlNS.current_ns.objects.pop("TendrlContext")
tendrlNS.setup_common_objects()
def client(**param):
raise Exception
monkeypatch.setattr(etcd, 'Client', client)
with pytest.raises(Exception):
tendrlNS.setup_common_objects()
| r0h4n/commons | tendrl/commons/tests/test_init.py | Python | lgpl-2.1 | 17,194 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from contextlib import contextmanager
@contextmanager
def log(name):
print('[%s] start...' % name)
yield
print('[%s] end.' % name)
with log('DEBUG'):
print('Hello, world!')
print('Hello, Python!')
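# For reference, the block above prints, in order:
#
#   [DEBUG] start...
#   Hello, world!
#   Hello, Python!
#   [DEBUG] end.
#
# A minimal class-based equivalent of log(), shown for comparison; the class
# name Log is illustrative, not part of the original sample.
class Log(object):
    def __init__(self, name):
        self.name = name

    def __enter__(self):
        print('[%s] start...' % self.name)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        print('[%s] end.' % self.name)
        return False  # do not suppress exceptions raised in the with-block


with Log('DEBUG'):
    print('Hello again!')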
| whyDK37/py_bootstrap | samples/context/do_with.py | Python | apache-2.0 | 269 | 0 |
#!/usr/bin/python
# Copyright 2008-2010 WebDriver committers
# Copyright 2008-2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import tempfile
import time
import shutil
import unittest
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoSuchFrameException
def not_available_on_remote(func):
def testMethod(self):
        # Note: the original compared the class object to the string 'remote',
        # which is always False; checking the driver's module path is an
        # assumption about the intended "skip when remote" behaviour.
        if type(self.driver).__module__.startswith('selenium.webdriver.remote'):
            return None  # skip this test against a remote driver
        else:
            return func(self)
return testMethod
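# A hedged usage sketch: the decorator above is meant to wrap individual test
# methods so they become no-ops against a remote driver, e.g.
#
#   @not_available_on_remote
#   def testSomethingLocalOnly(self):
#       ...
#
# No test in this file applies it; the method name above is hypothetical.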
class CorrectEventFiringTests(unittest.TestCase):
def testShouldFireClickEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("click")
def testShouldFireMouseDownEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mousedown")
def testShouldFireMouseUpEventWhenClicking(self):
self._loadPage("javascriptPage")
self._clickOnElementWhichRecordsEvents()
self._assertEventFired("mouseup")
def testShouldIssueMouseDownEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mousedown").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldIssueClickEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseclick").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse click")
def testShouldIssueMouseUpEvents(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("mouseup").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse up")
def testMouseEventsShouldBubbleUpToContainingElements(self):
self._loadPage("javascriptPage")
self.driver.find_element_by_id("child").click()
result = self.driver.find_element_by_id("result").text
self.assertEqual(result, "mouse down")
def testShouldEmitOnChangeEventsWhenSelectingElements(self):
self._loadPage("javascriptPage")
# Intentionally not looking up the select tag. See selenium r7937 for details.
allOptions = self.driver.find_elements_by_xpath("//select[@id='selector']//option")
initialTextValue = self.driver.find_element_by_id("result").text
foo = allOptions[0]
bar = allOptions[1]
foo.select()
self.assertEqual(self.driver.find_element_by_id("result").text, initialTextValue)
bar.select()
self.assertEqual(self.driver.find_element_by_id("result").text, "bar")
def testShouldEmitOnChangeEventsWhenChangingTheStateOfACheckbox(self):
self._loadPage("javascriptPage")
checkbox = self.driver.find_element_by_id("checkbox")
checkbox.select()
self.assertEqual(self.driver.find_element_by_id("result").text, "checkbox thing")
def testShouldEmitClickEventWhenClickingOnATextInputElement(self):
self._loadPage("javascriptPage")
clicker = self.driver.find_element_by_id("clickField")
clicker.click()
self.assertEqual(clicker.get_attribute("value"), "Clicked")
def testClearingAnElementShouldCauseTheOnChangeHandlerToFire(self):
self._loadPage("javascriptPage")
element = self.driver.find_element_by_id("clearMe")
element.clear()
result = self.driver.find_element_by_id("result")
self.assertEqual(result.text, "Cleared");
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnotherElementShouldCauseTheBlurEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# element2 = self.driver.find_element_by_id("changeable")
# element2.send_keys("bar")
# self._assertEventFired("blur")
# TODO Currently Failing and needs fixing
#def testSendingKeysToAnElementShouldCauseTheFocusEventToFire(self):
# self._loadPage("javascriptPage")
# element = self.driver.find_element_by_id("theworks")
# element.send_keys("foo")
# self._assertEventFired("focus")
def _clickOnElementWhichRecordsEvents(self):
self.driver.find_element_by_id("plainButton").click()
def _assertEventFired(self, eventName):
result = self.driver.find_element_by_id("result")
text = result.text
self.assertTrue(eventName in text, "No " + eventName + " fired: " + text)
def _pageURL(self, name):
return "http://localhost:%d/%s.html" % (self.webserver.port, name)
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| akiellor/selenium | py/test/selenium/webdriver/common/correct_event_firing_tests.py | Python | apache-2.0 | 5,557 | 0.003599 |
def test_basic_editor(scratch_tree):
sess = scratch_tree.edit('/')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'A new body'
sess.commit()
assert sess.closed
with open(sess.get_fs_path()) as f:
assert f.read().splitlines() == [
'_model: page',
'---',
'title: Index',
'---',
'body: A new body'
]
def test_create_alt(scratch_tree, scratch_pad):
sess = scratch_tree.edit('/', alt='de')
assert sess.id == ''
assert sess.path == '/'
assert sess.record is not None
assert sess['_model'] == 'page'
assert sess['title'] == 'Index'
assert sess['body'] == 'Hello World!'
sess['body'] = 'Hallo Welt!'
sess.commit()
assert sess.closed
# When we use the editor to change this, we only want the fields that
# changed compared to the base to be included.
with open(sess.get_fs_path(alt='de')) as f:
assert f.read().splitlines() == [
'body: Hallo Welt!'
]
scratch_pad.cache.flush()
item = scratch_pad.get('/', alt='de')
assert item['_slug'] == ''
assert item['title'] == 'Index'
assert item['body'].source == 'Hallo Welt!'
assert item['_model'] == 'page'
| bameda/lektor | tests/test_editor.py | Python | bsd-3-clause | 1,424 | 0 |