text (stringlengths, 6 to 947k) | repo_name (stringlengths, 5 to 100) | path (stringlengths, 4 to 231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6 to 947k) | score (float64, 0 to 0.34)
---|---|---|---|---|---|---|
from lcapy import R, C
n = C('C1') | (R('R1') + (C('C2') | (R('R2') + (C('C3') | (R('R3') + C('C4'))))))
n.draw(__file__.replace('.py', '.png'), layout='ladder')
| mph-/lcapy | doc/examples/networks/ladderRC3.py | Python | lgpl-2.1 | 163 | 0.01227 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NetworkInterfaceAssociation(Model):
"""Network interface and its custom security rules.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Network interface ID.
:vartype id: str
:param security_rules: Collection of custom security rules.
:type security_rules:
list[~azure.mgmt.network.v2018_01_01.models.SecurityRule]
"""
_validation = {
'id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'security_rules': {'key': 'securityRules', 'type': '[SecurityRule]'},
}
def __init__(self, *, security_rules=None, **kwargs) -> None:
super(NetworkInterfaceAssociation, self).__init__(**kwargs)
self.id = None
self.security_rules = security_rules
| lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/network_interface_association_py3.py | Python | mit | 1,345 | 0 |
from django.db import models
# Create your models here.
class Collage(models.Model):
name = models.CharField(max_length=50)
slug = models.SlugField(unique=True)
no_of_students = models.IntegerField()
year_estd = models.IntegerField()
def __str__(self):
return self.name
class Faculty(models.Model):
name = models.CharField(max_length=100)
price = models.IntegerField()
year_started = models.IntegerField()
collage = models.ForeignKey(Collage, default=None)
| ayys/collegesearch | main/models.py | Python | gpl-3.0 | 507 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from common import find_target_items
if len(sys.argv) != 3:
print("Find the value keyword in all pairs")
print(("Usage: ", sys.argv[0], "[input] [keyword]"))
exit(1)
find_target_items(sys.argv[1], sys.argv[2])
| BYVoid/OpenCC | data/scripts/find_target.py | Python | apache-2.0 | 282 | 0 |
from __future__ import unicode_literals
"""Migrating IPython < 4.0 to Jupyter
This *copies* configuration and resources to their new locations in Jupyter
Migrations:
- .ipython/
- nbextensions -> JUPYTER_DATA_DIR/nbextensions
- kernels -> JUPYTER_DATA_DIR/kernels
- .ipython/profile_default/
- static/custom -> .jupyter/custom
- nbconfig -> .jupyter/nbconfig
- security/
- notebook_secret, notebook_cookie_secret, nbsignatures.db -> JUPYTER_DATA_DIR
- ipython_{notebook,nbconvert,qtconsole}_config.py -> .jupyter/jupyter_{name}_config.py
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import os
import re
import shutil
from datetime import datetime
from traitlets.config import PyFileConfigLoader, JSONFileConfigLoader
from traitlets.log import get_logger
from ipython_genutils.path import ensure_dir_exists
try:
from IPython.paths import get_ipython_dir
except ImportError:
# IPython < 4
try:
from IPython.utils.path import get_ipython_dir
except ImportError:
def get_ipython_dir():
return os.environ.get('IPYTHONDIR', os.path.expanduser('~/.ipython'))
from .paths import jupyter_config_dir, jupyter_data_dir
from .application import JupyterApp
pjoin = os.path.join
migrations = {
pjoin('{ipython_dir}', 'nbextensions'): pjoin('{jupyter_data}', 'nbextensions'),
pjoin('{ipython_dir}', 'kernels'): pjoin('{jupyter_data}', 'kernels'),
pjoin('{profile}', 'nbconfig'): pjoin('{jupyter_config}', 'nbconfig'),
}
custom_src_t = pjoin('{profile}', 'static', 'custom')
custom_dst_t = pjoin('{jupyter_config}', 'custom')
for security_file in ('notebook_secret', 'notebook_cookie_secret', 'nbsignatures.db'):
src = pjoin('{profile}', 'security', security_file)
dst = pjoin('{jupyter_data}', security_file)
migrations[src] = dst
config_migrations = ['notebook', 'nbconvert', 'qtconsole']
regex = re.compile
config_substitutions = {
regex(r'\bIPythonQtConsoleApp\b'): 'JupyterQtConsoleApp',
regex(r'\bIPythonWidget\b'): 'JupyterWidget',
regex(r'\bRichIPythonWidget\b'): 'RichJupyterWidget',
regex(r'\bIPython\.html\b'): 'notebook',
regex(r'\bIPython\.nbconvert\b'): 'nbconvert',
}
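# For example (illustrative, not from the original file): after migration a config
# line such as
#   c.RichIPythonWidget.font_size = 10
# would read
#   c.RichJupyterWidget.font_size = 10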
def migrate_dir(src, dst):
"""Migrate a directory from src to dst"""
log = get_logger()
if not os.listdir(src):
log.debug("No files in %s" % src)
return False
if os.path.exists(dst):
if os.listdir(dst):
# already exists, non-empty
log.debug("%s already exists" % dst)
return False
else:
os.rmdir(dst)
log.info("Copying %s -> %s" % (src, dst))
ensure_dir_exists(os.path.dirname(dst))
shutil.copytree(src, dst, symlinks=True)
return True
def migrate_file(src, dst, substitutions=None):
"""Migrate a single file from src to dst
substitutions is an optional dict of {regex: replacement} for performing replacements on the file.
"""
log = get_logger()
if os.path.exists(dst):
# already exists
log.debug("%s already exists" % dst)
return False
log.info("Copying %s -> %s" % (src, dst))
ensure_dir_exists(os.path.dirname(dst))
shutil.copy(src, dst)
if substitutions:
with open(dst) as f:
text = f.read()
for pat, replacement in substitutions.items():
text = pat.sub(replacement, text)
with open(dst, 'w') as f:
f.write(text)
return True
def migrate_one(src, dst):
"""Migrate one item
dispatches to migrate_dir/_file
"""
log = get_logger()
if os.path.isfile(src):
return migrate_file(src, dst)
elif os.path.isdir(src):
return migrate_dir(src, dst)
else:
log.debug("Nothing to migrate for %s" % src)
return False
def migrate_static_custom(src, dst):
"""Migrate non-empty custom.js,css from src to dst
src, dst are 'custom' directories containing custom.{js,css}
"""
log = get_logger()
migrated = False
custom_js = pjoin(src, 'custom.js')
custom_css = pjoin(src, 'custom.css')
# check if custom_js is empty:
custom_js_empty = True
if os.path.isfile(custom_js):
with open(custom_js) as f:
js = f.read().strip()
for line in js.splitlines():
if not (
line.isspace()
or line.strip().startswith(('/*', '*', '//'))
):
custom_js_empty = False
break
# check if custom_css is empty:
custom_css_empty = True
if os.path.isfile(custom_css):
with open(custom_css) as f:
css = f.read().strip()
custom_css_empty = css.startswith('/*') and css.endswith('*/')
if custom_js_empty:
log.debug("Ignoring empty %s" % custom_js)
if custom_css_empty:
log.debug("Ignoring empty %s" % custom_css)
if custom_js_empty and custom_css_empty:
# nothing to migrate
return False
ensure_dir_exists(dst)
if not custom_js_empty or not custom_css_empty:
ensure_dir_exists(dst)
if not custom_js_empty:
if migrate_file(custom_js, pjoin(dst, 'custom.js')):
migrated = True
if not custom_css_empty:
if migrate_file(custom_css, pjoin(dst, 'custom.css')):
migrated = True
return migrated
def migrate_config(name, env):
"""Migrate a config file
Includes substitutions for updated configurable names.
"""
log = get_logger()
src_base = pjoin('{profile}', 'ipython_{name}_config').format(name=name, **env)
dst_base = pjoin('{jupyter_config}', 'jupyter_{name}_config').format(name=name, **env)
loaders = {
'.py': PyFileConfigLoader,
'.json': JSONFileConfigLoader,
}
migrated = []
for ext in ('.py', '.json'):
src = src_base + ext
dst = dst_base + ext
if os.path.exists(src):
cfg = loaders[ext](src).load_config()
if cfg:
if migrate_file(src, dst, substitutions=config_substitutions):
migrated.append(src)
else:
# don't migrate empty config files
log.debug("Not migrating empty config file: %s" % src)
return migrated
def migrate():
"""Migrate IPython configuration to Jupyter"""
env = {
'jupyter_data': jupyter_data_dir(),
'jupyter_config': jupyter_config_dir(),
'ipython_dir': get_ipython_dir(),
'profile': os.path.join(get_ipython_dir(), 'profile_default'),
}
migrated = False
for src_t, dst_t in migrations.items():
src = src_t.format(**env)
dst = dst_t.format(**env)
if os.path.exists(src):
if migrate_one(src, dst):
migrated = True
for name in config_migrations:
if migrate_config(name, env):
migrated = True
custom_src = custom_src_t.format(**env)
custom_dst = custom_dst_t.format(**env)
if os.path.exists(custom_src):
if migrate_static_custom(custom_src, custom_dst):
migrated = True
# write a marker to avoid re-running migration checks
ensure_dir_exists(env['jupyter_config'])
with open(os.path.join(env['jupyter_config'], 'migrated'), 'w') as f:
f.write(datetime.utcnow().isoformat())
return migrated
class JupyterMigrate(JupyterApp):
name = 'jupyter-migrate'
description = """
Migrate configuration and data from .ipython prior to 4.0 to Jupyter locations.
This migrates:
- config files in the default profile
- kernels in ~/.ipython/kernels
- notebook javascript extensions in ~/.ipython/extensions
- custom.js/css to .jupyter/custom
to their new Jupyter locations.
All files are copied, not moved.
If the destinations already exist, nothing will be done.
"""
def start(self):
if not migrate():
self.log.info("Found nothing to migrate.")
main = JupyterMigrate.launch_instance
if __name__ == '__main__':
main()
| unnikrishnankgs/va | venv/lib/python3.5/site-packages/jupyter_core/migrate.py | Python | bsd-2-clause | 8,222 | 0.004014 |
import pytz
import datetime
# statics
UNKNOWN_TYPE = 'Unknown'
DEFAULT_FREQ = 0 # No automatic update by default
DEFAULT_TIMEZONE = 'UTC'
DEFAULT_STATUS = False
class Component(dict):
# Private attributes
client_hostname = None
server_host = None
server_user = None
server_password = None
# Public attributes
component_type = UNKNOWN_TYPE
type = UNKNOWN_TYPE
url = None
hostname = None
name = None
freq = DEFAULT_FREQ
status = DEFAULT_STATUS
last_value = None
last_value_dt = None
timezone = None
# Init method uses dict so we can pass any field for creation
def __init__(self, **kwargs):
super(Component, self).__init__(**kwargs)
self.__dict__ = self
self.timezone = pytz.timezone(DEFAULT_TIMEZONE)
    # Read a component's next refresh date
def refresh_dt(self):
if self.last_value_dt is None:
return datetime.datetime.now(self.timezone)
else:
new_value_dt = self.last_value_dt + datetime.timedelta(seconds=self.freq)
return max(new_value_dt, datetime.datetime.now(self.timezone))
# Add a server hosting config
def add_config_server(self, client_hostname, server_host, server_user, server_password):
self.client_hostname = client_hostname
self.server_host = server_host
self.server_user = server_user
self.server_password = server_password
# Configure the component
@staticmethod
def initialize():
return True
# Register a new component on the server
def register(self):
return True
# Update the component
def update(self):
self.last_value_dt = datetime.datetime.now(self.timezone)
return True
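# Minimal usage sketch (illustrative, not part of the original module):
if __name__ == '__main__':
    sensor = Component(name='demo-sensor', freq=60)
    sensor.update()              # stamps last_value_dt in the component's timezone
    print(sensor.refresh_dt())   # next refresh is at least `freq` seconds from now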
| AMairesse/hc-client | src/hc_component.py | Python | gpl-2.0 | 1,780 | 0.002809 |
# This file is part of curious.
#
# curious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# curious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with curious. If not, see <http://www.gnu.org/licenses/>.
"""
Exceptions raised from within the library.
.. currentmodule:: curious.exc
"""
import enum
import warnings
from asks.response_objects import Response
class CuriousError(Exception):
"""
The base class for all curious exceptions.
"""
# HTTP based exceptions.
class ErrorCode(enum.IntEnum):
UNKNOWN_ACCOUNT = 10001
UNKNOWN_APPLICATION = 10002
UNKNOWN_CHANNEL = 10003
UNKNOWN_GUILD = 10004
UNKNOWN_INTEGRATION = 10005
UNKNOWN_INVITE = 10006
UNKNOWN_MEMBER = 10007
UNKNOWN_MESSAGE = 10008
    UNKNOWN_OVERWRITE = 10009
UNKNOWN_PROVIDER = 10010
UNKNOWN_ROLE = 10011
UNKNOWN_TOKEN = 10012
UNKNOWN_USER = 10013
UNKNOWN_EMOJI = 10014
NO_BOTS = 20001
ONLY_BOTS = 20002
MAX_GUILDS = 30001 # technically user only
MAX_FRIENDS = 30002
MAX_PINS = 30003
MAX_ROLES = 30005
MAX_REACTIONS = 30010
MAX_GUILD_CHANNELS = 30013
UNAUTHORIZED = 40001
MISSING_ACCESS = 50001
INVALID_ACCOUNT = 50002
NO_DMS = 50003
EMBED_DISABLED = 50004
CANNOT_EDIT = 50005
CANNOT_SEND_EMPTY_MESSAGE = 50006
CANNOT_SEND_TO_USER = 50007
CANNOT_SEND_TO_VC = 50008
VERIFICATION_TOO_HIGH = 50009
OAUTH2_NO_BOT = 50010
OAUTH2_LIMIT = 50011
INVALID_OAUTH_STATE = 50012
MISSING_PERMISSIONS = 50013
INVALID_AUTH_TOKEN = 50014
NOTE_TOO_LONG = 50015
INVALID_MESSAGE_COUNT = 50016
CANNOT_PIN = 50019
INVALID_VANITY_URL = 50020
TOO_OLD_TO_BULK_DELETE = 50034
INVALID_FORM_BODY = 50035
INVALID_INVITE_GUILD = 50036
REACTION_BLOCKED = 90001
UNKNOWN = 0
class HTTPException(CuriousError, ConnectionError):
"""
Raised when a HTTP request fails with a 400 <= e < 600 error code.
"""
def __init__(self, response: Response, error: dict):
self.response = response
error_code = error.get("code", 0)
try:
#: The error code for this response.
self.error_code = ErrorCode(error_code)
except ValueError:
warnings.warn(f"Received unknown error code {error_code}")
#: The error code for this response.
self.error_code = ErrorCode.UNKNOWN
self.error_message = error.get("message")
self.error = error
def __str__(self) -> str:
if self.error_code == ErrorCode.UNKNOWN:
return repr(self.error)
return "{} ({}): {}".format(self.error_code, self.error_code.name, self.error_message)
__repr__ = __str__
class Unauthorized(HTTPException):
"""
Raised when your bot token is invalid.
"""
class Forbidden(HTTPException):
"""
Raised when you don't have permission for something.
"""
class NotFound(HTTPException):
"""
Raised when something could not be found.
"""
class PermissionsError(CuriousError, PermissionError):
"""
Raised when you do not have sufficient permission to perform an action.
:ivar permission_required: The string of the permission required to perform this action.
"""
def __init__(self, permission_required: str):
self.permission_required = permission_required
def __str__(self) -> str:
return "Bot requires the permission {} to perform this action"\
.format(self.permission_required)
__repr__ = __str__
class HierarchyError(CuriousError, PermissionError):
"""
Raised when you can't do something due to the hierarchy.
"""
| SunDwarf/curious | curious/exc.py | Python | mit | 4,130 | 0.000484 |
import gdb
# This is not quite right, as local vars may override symname
def read_global_var (symname):
return gdb.selected_frame().read_var(symname)
def g_quark_to_string (quark):
if quark == None:
return None
quark = long(quark)
if quark == 0:
return None
try:
val = read_global_var ("quarks")
max_q = long(read_global_var ("quark_seq_id"))
except:
try:
val = read_global_var ("g_quarks")
max_q = long(read_global_var ("g_quark_seq_id"))
except:
return None;
if quark < max_q:
return val[quark].string()
return None
# We override the node printers too, so that node->next is not expanded
class GListNodePrinter:
"Prints a GList node"
def __init__ (self, val):
self.val = val
def to_string (self):
return "{data=%s, next=0x%x, prev=0x%x}" % (str(self.val["data"]), long(self.val["next"]), long(self.val["prev"]))
class GSListNodePrinter:
"Prints a GSList node"
def __init__ (self, val):
self.val = val
def to_string (self):
return "{data=%s, next=0x%x}" % (str(self.val["data"]), long(self.val["next"]))
class GListPrinter:
"Prints a GList"
class _iterator:
def __init__(self, head, listtype):
self.link = head
self.listtype = listtype
self.count = 0
def __iter__(self):
return self
def next(self):
if self.link == 0:
raise StopIteration
data = self.link['data']
self.link = self.link['next']
count = self.count
self.count = self.count + 1
return ('[%d]' % count, data)
def __init__ (self, val, listtype):
self.val = val
self.listtype = listtype
def children(self):
return self._iterator(self.val, self.listtype)
def to_string (self):
return "0x%x" % (long(self.val))
def display_hint (self):
return "array"
class GHashPrinter:
"Prints a GHashTable"
class _iterator:
def __init__(self, ht, keys_are_strings):
self.ht = ht
if ht != 0:
self.keys = ht["keys"]
self.values = ht["values"]
self.hashes = ht["hashes"]
self.size = ht["size"]
self.pos = 0
self.keys_are_strings = keys_are_strings
self.value = None
def __iter__(self):
return self
def next(self):
if self.ht == 0:
raise StopIteration
if self.value != None:
v = self.value
self.value = None
return v
while long(self.pos) < long(self.size):
self.pos = self.pos + 1
if long (self.hashes[self.pos]) >= 2:
key = self.keys[self.pos]
val = self.values[self.pos]
if self.keys_are_strings:
key = key.cast (gdb.lookup_type("char").pointer())
# Queue value for next result
self.value = ('[%dv]'% (self.pos), val)
# Return key
return ('[%dk]'% (self.pos), key)
raise StopIteration
def __init__ (self, val):
self.val = val
self.keys_are_strings = False
try:
string_hash = read_global_var ("g_str_hash")
except:
string_hash = None
if self.val != 0 and string_hash != None and self.val["hash_func"] == string_hash:
self.keys_are_strings = True
def children(self):
return self._iterator(self.val, self.keys_are_strings)
def to_string (self):
return "0x%x" % (long(self.val))
def display_hint (self):
return "map"
def pretty_printer_lookup (val):
if is_g_type_instance (val):
return GTypePrettyPrinter (val)
def pretty_printer_lookup (val):
# None yet, want things like hash table and list
type = val.type.unqualified()
# If it points to a reference, get the reference.
if type.code == gdb.TYPE_CODE_REF:
type = type.target ()
if type.code == gdb.TYPE_CODE_PTR:
type = type.target().unqualified()
        t = str(type)
        if t == "GList":
            return GListPrinter(val, "GList")
        if t == "GSList":
            return GListPrinter(val, "GSList")
        if t == "GHashTable":
            return GHashPrinter(val)
else:
t = str(type)
if t == "GList":
return GListNodePrinter(val)
if t == "GSList *":
return GListPrinter(val, "GSList")
return None
def register (obj):
if obj == None:
obj = gdb
obj.pretty_printers.append(pretty_printer_lookup)
class ForeachCommand (gdb.Command):
"""Foreach on list"""
def __init__ (self):
super (ForeachCommand, self).__init__ ("gforeach",
gdb.COMMAND_DATA,
gdb.COMPLETE_SYMBOL)
def valid_name (self, name):
if not name[0].isalpha():
return False
return True
def parse_args (self, arg):
i = arg.find(" ")
if i <= 0:
raise Exception ("No var specified")
var = arg[:i]
if not self.valid_name(var):
raise Exception ("Invalid variable name")
while i < len (arg) and arg[i].isspace():
i = i + 1
if arg[i:i+2] != "in":
raise Exception ("Invalid syntax, missing in")
i = i + 2
while i < len (arg) and arg[i].isspace():
i = i + 1
colon = arg.find (":", i)
if colon == -1:
raise Exception ("Invalid syntax, missing colon")
val = arg[i:colon]
colon = colon + 1
while colon < len (arg) and arg[colon].isspace():
colon = colon + 1
command = arg[colon:]
return (var, val, command)
def do_iter(self, arg, item, command):
item = item.cast (gdb.lookup_type("void").pointer())
item = long(item)
to_eval = "set $%s = (void *)0x%x\n"%(arg, item)
gdb.execute(to_eval)
gdb.execute(command)
def slist_iterator (self, arg, container, command):
l = container.cast (gdb.lookup_type("GSList").pointer())
while long(l) != 0:
self.do_iter (arg, l["data"], command)
l = l["next"]
def list_iterator (self, arg, container, command):
l = container.cast (gdb.lookup_type("GList").pointer())
while long(l) != 0:
self.do_iter (arg, l["data"], command)
l = l["next"]
def pick_iterator (self, container):
t = container.type.unqualified()
if t.code == gdb.TYPE_CODE_PTR:
t = t.target().unqualified()
t = str(t)
if t == "GSList":
return self.slist_iterator
if t == "GList":
return self.list_iterator
raise Exception("Invalid container type %s"%(str(container.type)))
def invoke (self, arg, from_tty):
(var, container, command) = self.parse_args(arg)
container = gdb.parse_and_eval (container)
func = self.pick_iterator(container)
func(var, container, command)
ForeachCommand ()
| jonnyniv/boost_converter | host/gui/GTK+/share/glib-2.0/gdb/glib.py | Python | apache-2.0 | 7,426 | 0.010908 |
import socket
UDP_IP = "0.0.0.0"
UDP_PORT = 5005
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))
packet_stack = {}
sequence_pointer = 1
peak_stack_size = 0
print "Listening on IP %s, Port: %s" % (UDP_IP, UDP_PORT)
def decode_packet(packet):
seq = int(packet[0:16])
size = int(packet[16:32])
pl = packet[32:]
return (seq, size, pl)
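# Illustrative counterpart (not part of the original script): how a sender could
# build a datagram this server accepts: a 16-digit sequence number, a 16-digit
# total datagram length (the 32-character header plus the payload), then the payload.
def encode_packet(seq, payload):
    size = 32 + len(payload)
    return "%016d%016d%s" % (seq, size, payload)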
def dump_state(peak_stack_size, packet_stack):
print "------ Report ------"
print "Peak Stack Size: %i" % peak_stack_size
stack_size = len(packet_stack)
print "Curent Stack Size: %i" % stack_size
if stack_size == 0:
print "Stack is clean."
return
highest_packet_seq = 0
lowest_packet_seq = 999999
for packet in packet_stack:
if packet > highest_packet_seq:
highest_packet_seq = packet
if packet < lowest_packet_seq:
lowest_packet_seq = packet
print "Lowest: %i Highest: %i" % (lowest_packet_seq, highest_packet_seq)
missing_packets = 0
for i in range(lowest_packet_seq, highest_packet_seq):
if i not in packet_stack:
missing_packets += 1
print "Missing packet between %i and %i is %i" % (lowest_packet_seq, highest_packet_seq, missing_packets)
try:
while True:
data, addr = sock.recvfrom(64536) # 64K Buffer Size
(seq, size, pl) = decode_packet(data)
print "Sequence Number: %i Size: %i" % ( seq, size)
print "Src IP: %s Src Port: %s" % addr
# print "Payload: '%s'" % pl
print "Data: '%s'" % data
# Payload starting with C is for Control commands
print "L2 Preamble: '%s'" % pl[0:1]
if pl[0:1] == "C":
command = int(pl[1:5])
print "Command: '%s'" % command
if command == 1:
print "Command 1: Display the debug trace."
dump_state(peak_stack_size, packet_stack)
elif command == 2:
print "Command 2: Clear the stack and reset the sequence_pointer."
sequence_pointer = 1
peak_stack_size = 0
packet_stack.clear()
print "\n"
continue
elif command == 3:
print "Command 3: Exit."
exit(0)
if len(data) == size:
print "Packet size validation confirmed."
else:
print "Packet size error! %i != %i" % (len(data), size)
raise Exception("Packet Size Error.")
if(seq == sequence_pointer):
print "Received packet (%i) in sequence, passing over." % sequence_pointer
sequence_pointer += 1
while sequence_pointer in packet_stack:
print "Next packet (%i) found in stack, poping out of stack." % sequence_pointer
packet_stack.pop(sequence_pointer, None)
sequence_pointer += 1
else:
print "Received packet seq %i out of order, pushing onto stack." % seq
packet_stack[seq] = data
stack_size = len(packet_stack)
print "Current Stack Size: %i" % stack_size
if stack_size > peak_stack_size:
peak_stack_size = stack_size
print "\n"
except KeyboardInterrupt:
dump_state(peak_stack_size, packet_stack)
except:
print "ERROR!"
print "Data: '%s'" % data
print addr
print "Sequence Index: %i" % sequence_pointer
print "Peak Stack Size: %i" % peak_stack_size
stack_size = len(packet_stack)
print "Curent Stack Size: %i" % stack_size
raise
| cdemers/networktools | UDPNetTests/test_server.py | Python | mit | 3,592 | 0.003898 |
from django import template
from django.utils.encoding import smart_str
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db.models import get_model
from django.db.models.query import QuerySet
register = template.Library()
class GroupURLNode(template.Node):
def __init__(self, view_name, group, kwargs, asvar):
self.view_name = view_name
self.group = group
self.kwargs = kwargs
self.asvar = asvar
def render(self, context):
url = ""
group = self.group.resolve(context)
kwargs = {}
for k, v in self.kwargs.items():
kwargs[smart_str(k, "ascii")] = v.resolve(context)
if group:
bridge = group.content_bridge
try:
url = bridge.reverse(self.view_name, group, kwargs=kwargs)
except NoReverseMatch:
if self.asvar is None:
raise
else:
try:
url = reverse(self.view_name, kwargs=kwargs)
except NoReverseMatch:
if self.asvar is None:
raise
if self.asvar:
context[self.asvar] = url
return ""
else:
return url
class ContentObjectsNode(template.Node):
def __init__(self, group_var, model_name_var, context_var):
self.group_var = template.Variable(group_var)
self.model_name_var = template.Variable(model_name_var)
self.context_var = context_var
def render(self, context):
group = self.group_var.resolve(context)
model_name = self.model_name_var.resolve(context)
if isinstance(model_name, QuerySet):
model = model_name
else:
app_name, model_name = model_name.split(".")
model = get_model(app_name, model_name)
context[self.context_var] = group.content_objects(model)
return ""
@register.tag
def groupurl(parser, token):
bits = token.contents.split()
tag_name = bits[0]
if len(bits) < 3:
raise template.TemplateSyntaxError("'%s' takes at least two arguments"
" (path to a view and a group)" % tag_name)
view_name = bits[1]
group = parser.compile_filter(bits[2])
args = []
kwargs = {}
asvar = None
if len(bits) > 3:
bits = iter(bits[3:])
for bit in bits:
if bit == "as":
asvar = bits.next()
break
else:
for arg in bit.split(","):
if "=" in arg:
k, v = arg.split("=", 1)
k = k.strip()
kwargs[k] = parser.compile_filter(v)
elif arg:
raise template.TemplateSyntaxError("'%s' does not support non-kwargs arguments." % tag_name)
return GroupURLNode(view_name, group, kwargs, asvar)
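# Illustrative template usage for the tag above ("group_detail" and the context
# variables are hypothetical):
#   {% groupurl group_detail group slug=group.slug as group_url %}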
@register.tag
def content_objects(parser, token):
"""
{% content_objects group "tasks.Task" as tasks %}
"""
bits = token.split_contents()
if len(bits) != 5:
raise template.TemplateSyntaxError("'%s' requires five arguments." % bits[0])
return ContentObjectsNode(bits[1], bits[2], bits[4])
| ericholscher/pinax | pinax/apps/groups/templatetags/group_tags.py | Python | mit | 3,311 | 0.003926 |
'''
Created on Dec 2, 2015
@author: Sameer Adhikari
'''
# Class that represents the common operations between
# composite and leaf/primitive nodes in the hierarchy
# This is a simulation which lacks a lot of operations
class Component(object):
def __init__(self, name):
self.name = name
def move(self, destination_path):
destination_folder = get_folder(destination_path)
del self.parent.children[self.name] # Remove folder from current location
destination_folder.children[self.name] = self # Move to new folder location
self.parent = destination_folder # Set up traversal path to root
def delete(self):
del self.parent.children[self.name] # Remove folder from current location
def add_child(self, child):
child.parent = self
self.children[child.name] = child
# Class that represents the composite node in the hierarchy
class Folder(Component):
def __init__(self, name):
super().__init__(name)
self.children = {} # A folder can have folders or files
# Class that represents the leaf/primitive node, which does not have children
class File(Component):
def __init__(self, name, contents):
super().__init__(name)
self.contents = contents
# Module level variable to represent the root of a filesystem
root = Folder('')
# This function causes a cyclic dependency.
# It operates on component but requires a root folder.
# But, folder is a subclass of component.
# Python's dynamic typing and handling of module variables helps out.
def get_folder(path):
''' Returns the folder node to which the string path refers '''
folders_along_path = path.split('/')[1:] # Ignore the initial empty string from split
node = root # Start at the top
for folder_name in folders_along_path: # Traverse down the tree
node = node.children[folder_name] # Get pointer to the node at the current tree level
return node
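# Small usage sketch (illustrative, not part of the original module):
if __name__ == '__main__':
    docs = Folder('docs')
    archive = Folder('archive')
    root.add_child(docs)
    root.add_child(archive)
    docs.add_child(File('note.txt', 'some contents'))
    get_folder('/docs').children['note.txt'].move('/archive')
    print(sorted(get_folder('/archive').children))  # ['note.txt']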
| tri2sing/PyOO | patterns/composite/entities.py | Python | gpl-2.0 | 1,989 | 0.012569 |
from .models import Site
class SitesMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
request.site = Site.objects._get_for_request(request)
return self.get_response(request)
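# To enable it (illustrative; the dotted path depends on how the package is laid
# out, e.g. "webquills.sites.middleware.SitesMiddleware"), add the class to the
# MIDDLEWARE list in the Django settings so request.site is populated per request.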
| veselosky/webquills | webquills/sites/middleware.py | Python | apache-2.0 | 275 | 0 |
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
import m5
from m5.objects import *
m5.util.addToPath('../configs/common')
from Caches import *
nb_cores = 4
cpus = [ DerivO3CPU(cpu_id=i) for i in xrange(nb_cores) ]
# system simulated
system = System(cpu = cpus, physmem = SimpleMemory(), membus = CoherentBus())
# l2cache & bus
system.toL2Bus = CoherentBus(clock = '2GHz')
system.l2c = L2(clock = '2GHz', size='4MB', assoc=8)
system.l2c.cpu_side = system.toL2Bus.master
# connect l2c to membus
system.l2c.mem_side = system.membus.slave
# add L1 caches
for cpu in cpus:
cpu.addPrivateSplitL1Caches(L1(size = '32kB', assoc = 1),
L1(size = '32kB', assoc = 4))
# create the interrupt controller
cpu.createInterruptController()
# connect cpu level-1 caches to shared level-2 cache
cpu.connectAllPorts(system.toL2Bus, system.membus)
cpu.clock = '2GHz'
# connect memory to membus
system.physmem.port = system.membus.master
# connect system port to membus
system.system_port = system.membus.slave
# -----------------------
# run simulation
# -----------------------
root = Root( full_system = False, system = system )
root.system.mem_mode = 'timing'
#root.trace.flags="Bus Cache"
#root.trace.flags = "BusAddrRanges"
| hoangt/gem5v | tests/configs/o3-timing-mp.py | Python | bsd-3-clause | 2,805 | 0.010339 |
#!/usr/bin/env python
from distutils.core import setup
setup(name = "comic-utils",
version = "0.4",
description = "Comic Utils",
author = "Milan Nikolic",
author_email = "[email protected]",
license = "GNU GPLv3",
url = "https://github.com/gen2brain/comic-utils",
packages = ["comicutils", "comicutils.ui"],
package_dir = {"comicutils": "comicutils"},
scripts = ["comic-convert", "comic-thumbnails"],
requires = ["Image", "PngImagePlugin"],
platforms = ["Linux", "Windows"]
)
| gen2brain/comic-utils | setup.py | Python | gpl-3.0 | 576 | 0.0625 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from alembic import op
import sqlalchemy as sa
from neutron_lib.db import constants as db_const
from neutron.db import migration
"""fip qos
Revision ID: 594422d373ee
Revises: 7d32f979895f
Create Date: 2016-04-26 17:16:10.323756
"""
# revision identifiers, used by Alembic.
revision = '594422d373ee'
down_revision = '7d32f979895f'
# milestone identifier, used by neutron-db-manage
neutron_milestone = [migration.QUEENS]
def upgrade():
op.create_table(
'qos_fip_policy_bindings',
sa.Column('policy_id',
sa.String(length=db_const.UUID_FIELD_SIZE),
sa.ForeignKey('qos_policies.id', ondelete='CASCADE'),
nullable=False),
sa.Column('fip_id',
sa.String(length=db_const.UUID_FIELD_SIZE),
sa.ForeignKey('floatingips.id', ondelete='CASCADE'),
nullable=False, unique=True))
| noironetworks/neutron | neutron/db/migration/alembic_migrations/versions/queens/expand/594422d373ee_fip_qos.py | Python | apache-2.0 | 1,485 | 0 |
"""Development settings and globals."""
from os.path import join, normpath
from base import *
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
########## END EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': normpath(join(DJANGO_ROOT, 'default.db')),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
########## END CACHE CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('127.0.0.1',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
########## END TOOLBAR CONFIGURATION
| browning/shows | shows/shows/settings/local.py | Python | mit | 1,629 | 0.00798 |
import pytest
from app import models
from app.extensions import db
@pytest.fixture(scope='function')
def mock_picture():
def make_mock_picture(user=None, tags=None, despriction=None, address=None):
_picture = models.Picture(
userId=str(user.id),
despriction=despriction or 'testdes',
address=address or 'testaddress'
)
_picture.tags = [
models.Tag(tag=tags or 'testtags')
]
db.session.add(_picture)
db.session.commit()
# _tags = models.Tags(
# picId=_picture.id,
# tag='testtag'
# )
# db.session.add(_tags)
# db.session.commit()
return _picture
return make_mock_picture
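# Illustrative use in a test (assumes a user fixture, here called `mock_user`,
# is defined elsewhere in the suite; the names are hypothetical):
# def test_picture_has_tag(mock_picture, mock_user):
#     picture = mock_picture(user=mock_user, tags='sunset')
#     assert picture.tags[0].tag == 'sunset'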
| eightHundreds/irides | tests/mocks/pictures.py | Python | gpl-3.0 | 743 | 0.002692 |
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Nichts-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date
bitcoinDir = "./";
inFile = bitcoinDir+"/share/qt/Info.plist"
outFile = "Nichts-Qt.app/Contents/Info.plist"
version = "unknown";
fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro"
for line in open(fileForGrabbingVersion):
lineArr = line.replace(" ", "").split("=");
if lineArr[0].startswith("VERSION"):
version = lineArr[1].replace("\n", "");
fIn = open(inFile, "r")
fileContent = fIn.read()
s = Template(fileContent)
newFileContent = s.substitute(VERSION=version,YEAR=date.today().year)
fOut = open(outFile, "w");
fOut.write(newFileContent);
print "Info.plist fresh created"
| eysho/BestKnownGame-Coins---Source | share/qt/clean_mac_info_plist.py | Python | mit | 922 | 0.016269 |
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.utils import unittest
from django.test.client import RequestFactory
from django.db.models import query
from django.contrib.admin.sites import AdminSite
from cities_light import admin as cl_admin
from cities_light import models as cl_models
class AdminTestCase(unittest.TestCase):
def setUp(self):
self.factory = RequestFactory()
self.admin_site = AdminSite()
def testCityChangeList(self):
request = self.factory.get('/some/path/', data={'q': 'some query'})
city_admin = cl_admin.CityAdmin(cl_models.City, self.admin_site)
changelist = cl_admin.CityChangeList(
request, cl_models.City, cl_admin.CityAdmin.list_display,
cl_admin.CityAdmin.list_display_links, cl_admin.CityAdmin.list_filter,
cl_admin.CityAdmin.date_hierarchy, cl_admin.CityAdmin.search_fields,
cl_admin.CityAdmin.list_select_related, cl_admin.CityAdmin.list_per_page,
cl_admin.CityAdmin.list_max_show_all, cl_admin.CityAdmin.list_editable, city_admin)
self.assertIsInstance(changelist.get_query_set(request), query.QuerySet)
| KevinGrahamFoster/django-cities-light | test_project/tests.py | Python | mit | 1,189 | 0.004205 |
# Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os
import pytest
import websockets
_command_counter = 1
def get_next_command_id():
global _command_counter
_command_counter += 1
return _command_counter
@pytest.fixture
async def websocket():
port = os.getenv('PORT', 8080)
url = f'ws://localhost:{port}'
async with websockets.connect(url) as connection:
yield connection
# noinspection PyUnusedFunction
@pytest.fixture
async def context_id(websocket):
# Note: there can be a race condition between initially created context's
# events and following subscription commands. Sometimes subscribe is called
# before the initial context emitted `browsingContext.contextCreated`,
# `browsingContext.domContentLoaded`, or `browsingContext.load` events,
# which makes events verification way harder. Navigation command guarantees
# there will be no follow-up events, as it uses `interactive` flag.
# TODO: find a way to avoid mentioned race condition properly.
open_context_id = await get_open_context_id(websocket)
await goto_url(websocket, open_context_id, "about:blank")
return open_context_id
@pytest.fixture(autouse=True)
async def before_each_test(websocket):
# This method can be used for browser state preparation.
assert True
async def subscribe(websocket, event_names, context_ids=None):
if isinstance(event_names, str):
event_names = [event_names]
command = {
"method": "session.subscribe",
"params": {
"events": event_names}}
if context_ids is not None:
command["params"]["contexts"] = context_ids
await execute_command(websocket, command)
# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=[]):
assert type(expected) == type(actual)
if type(expected) is list:
assert len(expected) == len(actual)
for index, val in enumerate(expected):
recursiveCompare(expected[index], actual[index], ignore_attributes)
return
if type(expected) is dict:
assert expected.keys() == actual.keys(), \
f"Key sets should be the same: " \
f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
for index, val in enumerate(expected):
if val not in ignore_attributes:
recursiveCompare(expected[val], actual[val], ignore_attributes)
return
assert expected == actual
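# Example (illustrative, not from the original file): compare two responses while
# ignoring a volatile attribute such as a timestamp.
# recursiveCompare(
#     {"status": "ok", "timestamp": 1},
#     {"status": "ok", "timestamp": 999},
#     ignore_attributes=["timestamp"])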
# Returns an id of an open context.
async def get_open_context_id(websocket):
result = await execute_command(websocket, {
"method": "browsingContext.getTree",
"params": {}})
return result['contexts'][0]['context']
async def send_JSON_command(websocket, command):
if 'id' not in command:
command_id = get_next_command_id()
command['id'] = command_id
await websocket.send(json.dumps(command))
async def read_JSON_message(websocket):
return json.loads(await websocket.recv())
# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
await execute_command(websocket, {
"method": "browsingContext.navigate",
"params": {
"url": url,
"context": context_id,
"wait": "interactive"}})
# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
command_id = get_next_command_id()
command['id'] = command_id
await send_JSON_command(websocket, command)
while True:
# Wait for the command to be finished.
resp = await read_JSON_message(websocket)
if 'id' in resp and resp['id'] == command_id:
assert result_field in resp, \
f"Field `{result_field}` should be in the result object:" \
f"\n {resp}"
return resp[result_field]
# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
while True:
event_response = await read_JSON_message(websocket)
if 'method' in event_response and event_response['method'] == event_method:
return event_response
| GoogleChromeLabs/chromium-bidi | tests/_helpers.py | Python | apache-2.0 | 4,890 | 0.000613 |
from django.db import models
from .querysets import SongQuerySet
class SongManager(models.Manager):
def get_queryset(self):
return SongQuerySet(self.model, using=self._db)
def available(self):
return self.get_queryset().songs().enabled().published()
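# Illustrative wiring (assumes a Song model defined elsewhere in the app):
# class Song(models.Model):
#     objects = SongManager()
# after which Song.objects.available() yields enabled, published songs.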
| RecursiveGreen/spradio-django | savepointradio/radio/managers.py | Python | mit | 278 | 0 |
from setuptools import setup
def readme():
with open('README.rst.example') as f:
return f.read()
setup(name='manifold_gui',
version='0.1',
description='GUI for a manifold technique',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Alpha',
'Environment :: Console',
'Environment :: X11 Applications',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python :: 2.7 :: chimera',
'Intended Audience :: End Users/Desktop',
],
keywords='manifold chimera',
author='Hstau Y Liao',
platform='linux chimera',
author_email='[email protected]',
packages=['gui'],
include_package_data=True,
zip_safe=False)
| hstau/manifold-cryo | setup.py | Python | gpl-2.0 | 792 | 0.002525 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Movie.studio'
db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['movie_library.Studio'], null=True))
def backwards(self, orm):
# Changing field 'Movie.studio'
db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(default='', to=orm['movie_library.Studio']))
models = {
u'movie_library.actor': {
'Meta': {'object_name': 'Actor'},
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'movie_library.director': {
'Meta': {'object_name': 'Director'},
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'nick_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
},
u'movie_library.genre': {
'Meta': {'object_name': 'Genre'},
'explanation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'movie_library.movie': {
'Meta': {'object_name': 'Movie'},
'actor': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Actor']", 'symmetrical': 'False'}),
'cover_art': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'director': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Director']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'release_year': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'studio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['movie_library.Studio']", 'null': 'True', 'blank': 'True'}),
'synopsis': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'writer': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Writer']", 'symmetrical': 'False'})
},
u'movie_library.studio': {
'Meta': {'object_name': 'Studio'},
'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'state_province': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
u'movie_library.writer': {
'Meta': {'object_name': 'Writer'},
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['movie_library'] | atimothee/django-playground | django_playground/movie_library/migrations/0006_auto__chg_field_movie_studio.py | Python | bsd-3-clause | 4,380 | 0.007763 |
#!/usr/bin/env python
"""
runtests.py [OPTIONS] [-- ARGS]
Run tests, building the project first.
Examples::
$ python runtests.py
$ python runtests.py -s {SAMPLE_SUBMODULE}
$ python runtests.py -t {SAMPLE_TEST}
$ python runtests.py --ipython
$ python runtests.py --python somescript.py
$ python runtests.py --bench
Run a debugger:
$ gdb --args python runtests.py [...other args...]
Generate C code coverage listing under build/lcov/:
(requires http://ltp.sourceforge.net/coverage/lcov.php)
$ python runtests.py --gcov [...other args...]
$ python runtests.py --lcov-html
"""
#
# This is a generic test runner script for projects using Numpy's test
# framework. Change the following values to adapt to your project:
#
PROJECT_MODULE = "numpy"
PROJECT_ROOT_FILES = ['numpy', 'LICENSE.txt', 'setup.py']
SAMPLE_TEST = "numpy/linalg/tests/test_linalg.py:test_byteorder_check"
SAMPLE_SUBMODULE = "linalg"
EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
'/usr/local/lib/ccache', '/usr/local/lib/f90cache']
# ---------------------------------------------------------------------
if __doc__ is None:
__doc__ = "Run without -OO if you want usage info"
else:
__doc__ = __doc__.format(**globals())
import sys
import os
# In case we are run from the source directory, we don't want to import the
# project from there:
sys.path.pop(0)
import shutil
import subprocess
import time
import imp
from argparse import ArgumentParser, REMAINDER
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
def main(argv):
parser = ArgumentParser(usage=__doc__.lstrip())
parser.add_argument("--verbose", "-v", action="count", default=1,
help="more verbosity")
parser.add_argument("--no-build", "-n", action="store_true", default=False,
help="do not build the project (use system installed version)")
parser.add_argument("--build-only", "-b", action="store_true", default=False,
help="just build, do not run any tests")
parser.add_argument("--doctests", action="store_true", default=False,
help="Run doctests in module")
parser.add_argument("--coverage", action="store_true", default=False,
help=("report coverage of project code. HTML output goes "
"under build/coverage"))
parser.add_argument("--gcov", action="store_true", default=False,
help=("enable C code coverage via gcov (requires GCC). "
"gcov output goes to build/**/*.gc*"))
parser.add_argument("--lcov-html", action="store_true", default=False,
help=("produce HTML for C code coverage information "
"from a previous run with --gcov. "
"HTML output goes to build/lcov/"))
parser.add_argument("--mode", "-m", default="fast",
help="'fast', 'full', or something that could be "
"passed to nosetests -A [default: fast]")
parser.add_argument("--submodule", "-s", default=None,
help="Submodule whose tests to run (cluster, constants, ...)")
parser.add_argument("--pythonpath", "-p", default=None,
help="Paths to prepend to PYTHONPATH")
parser.add_argument("--tests", "-t", action='append',
help="Specify tests to run")
parser.add_argument("--python", action="store_true",
help="Start a Python shell with PYTHONPATH set")
parser.add_argument("--ipython", "-i", action="store_true",
help="Start IPython shell with PYTHONPATH set")
parser.add_argument("--shell", action="store_true",
help="Start Unix shell with PYTHONPATH set")
parser.add_argument("--debug", "-g", action="store_true",
help="Debug build")
parser.add_argument("--parallel", "-j", type=int, default=0,
help="Number of parallel jobs during build")
parser.add_argument("--show-build-log", action="store_true",
help="Show build output rather than using a log file")
parser.add_argument("--bench", action="store_true",
help="Run benchmark suite instead of test suite")
parser.add_argument("--bench-compare", action="store", metavar="COMMIT",
help=("Compare benchmark results to COMMIT. "
"Note that you need to commit your changes first!"))
parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER,
help="Arguments to pass to Nose, Python or shell")
args = parser.parse_args(argv)
if args.bench_compare:
args.bench = True
args.no_build = True # ASV does the building
if args.lcov_html:
# generate C code coverage output
lcov_generate()
sys.exit(0)
if args.pythonpath:
for p in reversed(args.pythonpath.split(os.pathsep)):
sys.path.insert(0, p)
if args.gcov:
gcov_reset_counters()
if args.debug and args.bench:
print("*** Benchmarks should not be run against debug "
"version; remove -g flag ***")
if not args.no_build:
site_dir = build_project(args)
sys.path.insert(0, site_dir)
os.environ['PYTHONPATH'] = site_dir
extra_argv = args.args[:]
if extra_argv and extra_argv[0] == '--':
extra_argv = extra_argv[1:]
if args.python:
# Debugging issues with warnings is much easier if you can see them
print("Enabling display of all warnings")
import warnings; warnings.filterwarnings("always")
if extra_argv:
# Don't use subprocess, since we don't want to include the
# current path in PYTHONPATH.
sys.argv = extra_argv
with open(extra_argv[0], 'r') as f:
script = f.read()
sys.modules['__main__'] = imp.new_module('__main__')
ns = dict(__name__='__main__',
__file__=extra_argv[0])
exec_(script, ns)
sys.exit(0)
else:
import code
code.interact()
sys.exit(0)
if args.ipython:
# Debugging issues with warnings is much easier if you can see them
print("Enabling display of all warnings and pre-importing numpy as np")
import warnings; warnings.filterwarnings("always")
import IPython
import numpy as np
IPython.embed(user_ns={"np": np})
sys.exit(0)
if args.shell:
shell = os.environ.get('SHELL', 'sh')
print("Spawning a Unix shell...")
os.execv(shell, [shell] + extra_argv)
sys.exit(1)
if args.coverage:
dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
fn = os.path.join(dst_dir, 'coverage_html.js')
if os.path.isdir(dst_dir) and os.path.isfile(fn):
shutil.rmtree(dst_dir)
extra_argv += ['--cover-html',
'--cover-html-dir='+dst_dir]
if args.bench:
# Run ASV
items = extra_argv
if args.tests:
items += args.tests
if args.submodule:
items += [args.submodule]
bench_args = []
for a in items:
bench_args.extend(['--bench', a])
if not args.bench_compare:
cmd = ['asv', 'run', '-n', '-e', '--python=same'] + bench_args
os.chdir(os.path.join(ROOT_DIR, 'benchmarks'))
os.execvp(cmd[0], cmd)
sys.exit(1)
else:
commits = [x.strip() for x in args.bench_compare.split(',')]
if len(commits) == 1:
commit_a = commits[0]
commit_b = 'HEAD'
elif len(commits) == 2:
commit_a, commit_b = commits
else:
                parser.error("Too many commits to compare benchmarks for")
# Check for uncommitted files
if commit_b == 'HEAD':
r1 = subprocess.call(['git', 'diff-index', '--quiet',
'--cached', 'HEAD'])
r2 = subprocess.call(['git', 'diff-files', '--quiet'])
if r1 != 0 or r2 != 0:
print("*"*80)
print("WARNING: you have uncommitted changes --- "
"these will NOT be benchmarked!")
print("*"*80)
# Fix commit ids (HEAD is local to current repo)
p = subprocess.Popen(['git', 'rev-parse', commit_b],
stdout=subprocess.PIPE)
out, err = p.communicate()
commit_b = out.strip()
p = subprocess.Popen(['git', 'rev-parse', commit_a],
stdout=subprocess.PIPE)
out, err = p.communicate()
commit_a = out.strip()
cmd = ['asv', 'continuous', '-e', '-f', '1.05',
commit_a, commit_b] + bench_args
os.chdir(os.path.join(ROOT_DIR, 'benchmarks'))
os.execvp(cmd[0], cmd)
sys.exit(1)
test_dir = os.path.join(ROOT_DIR, 'build', 'test')
if args.build_only:
sys.exit(0)
elif args.submodule:
modname = PROJECT_MODULE + '.' + args.submodule
try:
__import__(modname)
test = sys.modules[modname].test
except (ImportError, KeyError, AttributeError):
print("Cannot run tests for %s" % modname)
sys.exit(2)
elif args.tests:
def fix_test_path(x):
# fix up test path
p = x.split(':')
p[0] = os.path.relpath(os.path.abspath(p[0]),
test_dir)
return ':'.join(p)
tests = [fix_test_path(x) for x in args.tests]
def test(*a, **kw):
extra_argv = kw.pop('extra_argv', ())
extra_argv = extra_argv + tests[1:]
kw['extra_argv'] = extra_argv
from numpy.testing import Tester
return Tester(tests[0]).test(*a, **kw)
else:
__import__(PROJECT_MODULE)
test = sys.modules[PROJECT_MODULE].test
# Run the tests under build/test
try:
shutil.rmtree(test_dir)
except OSError:
pass
try:
os.makedirs(test_dir)
except OSError:
pass
cwd = os.getcwd()
try:
os.chdir(test_dir)
result = test(args.mode,
verbose=args.verbose,
extra_argv=extra_argv,
doctests=args.doctests,
coverage=args.coverage)
finally:
os.chdir(cwd)
if result.wasSuccessful():
sys.exit(0)
else:
sys.exit(1)
def build_project(args):
"""
Build a dev version of the project.
Returns
-------
site_dir
site-packages directory where it was installed
"""
root_ok = [os.path.exists(os.path.join(ROOT_DIR, fn))
for fn in PROJECT_ROOT_FILES]
if not all(root_ok):
print("To build the project, run runtests.py in "
"git checkout or unpacked source")
sys.exit(1)
dst_dir = os.path.join(ROOT_DIR, 'build', 'testenv')
env = dict(os.environ)
cmd = [sys.executable, 'setup.py']
# Always use ccache, if installed
env['PATH'] = os.pathsep.join(EXTRA_PATH + env.get('PATH', '').split(os.pathsep))
if args.debug or args.gcov:
# assume everyone uses gcc/gfortran
env['OPT'] = '-O0 -ggdb'
env['FOPT'] = '-O0 -ggdb'
if args.gcov:
import distutils.sysconfig
cvars = distutils.sysconfig.get_config_vars()
env['OPT'] = '-O0 -ggdb'
env['FOPT'] = '-O0 -ggdb'
env['CC'] = cvars['CC'] + ' --coverage'
env['CXX'] = cvars['CXX'] + ' --coverage'
env['F77'] = 'gfortran --coverage '
env['F90'] = 'gfortran --coverage '
env['LDSHARED'] = cvars['LDSHARED'] + ' --coverage'
env['LDFLAGS'] = " ".join(cvars['LDSHARED'].split()[1:]) + ' --coverage'
cmd += ["build"]
if args.parallel > 1:
cmd += ["-j", str(args.parallel)]
cmd += ['install', '--prefix=' + dst_dir]
log_filename = os.path.join(ROOT_DIR, 'build.log')
if args.show_build_log:
ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
else:
log_filename = os.path.join(ROOT_DIR, 'build.log')
print("Building, see build.log...")
with open(log_filename, 'w') as log:
p = subprocess.Popen(cmd, env=env, stdout=log, stderr=log,
cwd=ROOT_DIR)
# Wait for it to finish, and print something to indicate the
# process is alive, but only if the log file has grown (to
            # allow continuous integration environments to kill a hanging
# process accurately if it produces no output)
last_blip = time.time()
last_log_size = os.stat(log_filename).st_size
while p.poll() is None:
time.sleep(0.5)
if time.time() - last_blip > 60:
log_size = os.stat(log_filename).st_size
if log_size > last_log_size:
print(" ... build in progress")
last_blip = time.time()
last_log_size = log_size
ret = p.wait()
if ret == 0:
print("Build OK")
else:
if not args.show_build_log:
with open(log_filename, 'r') as f:
print(f.read())
print("Build failed!")
sys.exit(1)
from distutils.sysconfig import get_python_lib
site_dir = get_python_lib(prefix=dst_dir, plat_specific=True)
return site_dir
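# A minimal sketch of how build_project() is meant to be consumed by the test
# runner: install into build/testenv, then put the resulting site-packages
# directory on the import path. The caller code shown here is illustrative.
#
#     site_dir = build_project(args)
#     sys.path.insert(0, site_dir)
#     os.environ['PYTHONPATH'] = site_dir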
#
# GCOV support
#
def gcov_reset_counters():
print("Removing previous GCOV .gcda files...")
build_dir = os.path.join(ROOT_DIR, 'build')
for dirpath, dirnames, filenames in os.walk(build_dir):
for fn in filenames:
if fn.endswith('.gcda') or fn.endswith('.da'):
pth = os.path.join(dirpath, fn)
os.unlink(pth)
#
# LCOV support
#
LCOV_OUTPUT_FILE = os.path.join(ROOT_DIR, 'build', 'lcov.out')
LCOV_HTML_DIR = os.path.join(ROOT_DIR, 'build', 'lcov')
def lcov_generate():
try: os.unlink(LCOV_OUTPUT_FILE)
except OSError: pass
try: shutil.rmtree(LCOV_HTML_DIR)
except OSError: pass
print("Capturing lcov info...")
subprocess.call(['lcov', '-q', '-c',
'-d', os.path.join(ROOT_DIR, 'build'),
'-b', ROOT_DIR,
'--output-file', LCOV_OUTPUT_FILE])
print("Generating lcov HTML output...")
ret = subprocess.call(['genhtml', '-q', LCOV_OUTPUT_FILE,
'--output-directory', LCOV_HTML_DIR,
'--legend', '--highlight'])
if ret != 0:
print("genhtml failed!")
else:
print("HTML output generated under build/lcov/")
#
# Python 3 support
#
if sys.version_info[0] >= 3:
import builtins
exec_ = getattr(builtins, "exec")
else:
def exec_(code, globs=None, locs=None):
"""Execute code in a namespace."""
if globs is None:
frame = sys._getframe(1)
globs = frame.f_globals
if locs is None:
locs = frame.f_locals
del frame
elif locs is None:
locs = globs
exec("""exec code in globs, locs""")
if __name__ == "__main__":
main(argv=sys.argv[1:])
| moreati/numpy | runtests.py | Python | bsd-3-clause | 15,724 | 0.001717 |
__author__ = 'Andy Gallagher <[email protected]>'
import xml.etree.ElementTree as ET
import dateutil.parser
from .vidispine_api import always_string
class VSMetadata:
    def __init__(self, initial_data=None):
        # Default to None rather than a shared mutable {} so instances never share one dict.
        self.contentDict = initial_data if initial_data is not None else {}
self.primaryGroup = None
def addValue(self,key,value):
if key in self.contentDict:
self.contentDict[key].append(value)
else:
self.contentDict[key]=[]
self.contentDict[key].append(value)
def setPrimaryGroup(self,g):
self.primaryGroup = g
def toXML(self,mdGroup=None):
from datetime import datetime
xmldoc=ET.ElementTree()
ns = "{http://xml.vidispine.com/schema/vidispine}"
rootEl=ET.Element('{0}MetadataDocument'.format(ns))
xmldoc._setroot(rootEl)
timespanEl=ET.Element('{0}timespan'.format(ns),
attrib={'start': '-INF',
'end': '+INF'})
rootEl.append(timespanEl)
if mdGroup is None and self.primaryGroup is not None:
mdGroup = self.primaryGroup
if(mdGroup):
groupEl=ET.Element('{0}group'.format(ns))
groupEl.text=mdGroup
rootEl.append(groupEl)
for key,value in list(self.contentDict.items()):
fieldEl=ET.Element('{0}field'.format(ns))
nameEl=ET.Element('{0}name'.format(ns))
nameEl.text = key
fieldEl.append(nameEl)
if not isinstance(value,list):
value = [value]
for line in value:
valueEl=ET.Element('{0}value'.format(ns))
if isinstance(line,datetime):
line = line.strftime("%Y-%m-%dT%H:%M:%S%Z")
valueEl.text = always_string(line)
fieldEl.append(valueEl)
timespanEl.append(fieldEl)
return ET.tostring(rootEl,encoding="utf8").decode("utf8")
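# Example usage (field and group names are hypothetical): build a metadata
# document and render it as Vidispine XML.
#
#     md = VSMetadata()
#     md.setPrimaryGroup("Asset")
#     md.addValue("title", "My clip")
#     xml_body = md.toXML()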
class VSMetadataMixin(object):
_xmlns = "{http://xml.vidispine.com/schema/vidispine}"
@staticmethod
def _safe_get_attrib(xmlnode, attribute, default):
try:
return xmlnode.attrib[attribute]
        except (KeyError, AttributeError):
            # KeyError: the attribute is missing; AttributeError: the node is None.
            return default
@staticmethod
def _safe_get_subvalue(xmlnode, subnode_name, default):
try:
node = xmlnode.find(subnode_name)
if node is not None:
return node.text
else:
return default
except AttributeError:
return default
class VSMetadataValue(VSMetadataMixin):
def __init__(self, valuenode=None, uuid=None):
self.user = None
self.uuid = None
self.timestamp = None
self.change = None
self.value = None
if valuenode is not None:
self.uuid = self._safe_get_attrib(valuenode,"uuid", None)
self.user = self._safe_get_attrib(valuenode, "user", None)
try:
self.timestamp = dateutil.parser.parse(self._safe_get_attrib(valuenode,"timestamp", None))
except TypeError: #dateutil.parser got nothing
self.timestamp = None
self.change = self._safe_get_attrib(valuenode, "change", None)
self.value = valuenode.text
elif uuid is not None:
self.uuid = uuid
def __repr__(self):
return "VSMetadataValue(\"{0}\")".format(self.value)
def __eq__(self, other):
return other.uuid==self.uuid
class VSMetadataReference(VSMetadataMixin):
def __init__(self, refnode=None, uuid=None):
"""
Initialises, either to an empty reference, to an existing uuid or to an xml fragment
:param uuid: string representing the uuid of something to reference
:param refnode: pointer to an elementtree node of <referenced> in a MetadataDocument
"""
if refnode is not None:
self.uuid = self._safe_get_attrib(refnode,"uuid",None)
self.id = self._safe_get_attrib(refnode,"id",None)
self.type = self._safe_get_attrib(refnode,"type",None)
if refnode is None and uuid is not None:
self.uuid=uuid
self.id = None
self.type = None
def __repr__(self):
return "VSMetadataReference {0} to {1} {2}".format(self.uuid,self.type,self.id)
def __eq__(self, other):
return other.uuid==self.uuid
class VSMetadataAttribute(VSMetadataMixin):
"""
this class represents the full metadata present in an xml <field> entry
"""
def __init__(self, fieldnode=None):
if fieldnode is not None:
self.uuid = self._safe_get_attrib(fieldnode,"uuid", None)
self.user = self._safe_get_attrib(fieldnode, "user", None)
try:
self.timestamp = dateutil.parser.parse(self._safe_get_attrib(fieldnode,"timestamp", None))
except TypeError: #dateutil.parser got nothing
self.timestamp = None
self.change = self._safe_get_attrib(fieldnode,"change",None)
self.name = self._safe_get_subvalue(fieldnode, "{0}name".format(self._xmlns), None)
self.values = [VSMetadataValue(value_node) for value_node in fieldnode.findall('{0}value'.format(self._xmlns))]
self.references = [VSMetadataReference(ref_node) for ref_node in fieldnode.findall('{0}referenced'.format(self._xmlns))]
else:
self.uuid = None
self.user = None
self.timestamp = None
self.change = None
self.name = None
self.values = []
self.references = []
def __eq__(self, other):
return other.uuid==self.uuid
| fredex42/gnmvidispine | gnmvidispine/vs_metadata.py | Python | gpl-2.0 | 5,748 | 0.008525 |
"""
Python Blueprint
================
Does not install python itself, only develop and setup tools.
Contains pip helper for other blueprints to use.
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.python
"""
from fabric.decorators import task
from refabric.api import run, info
from refabric.context_managers import sudo
from . import debian
__all__ = ['setup']
pip_log_file = '/tmp/pip.log'
@task
def setup():
"""
Install python develop tools
"""
install()
def install():
with sudo():
info('Install python dependencies')
debian.apt_get('install', 'python-dev', 'python-setuptools')
run('easy_install pip')
run('touch {}'.format(pip_log_file))
debian.chmod(pip_log_file, mode=777)
pip('install', 'setuptools', '--upgrade')
def pip(command, *options):
info('Running pip {}', command)
run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
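# A minimal sketch of how another blueprint would use this helper (the import
# path and the pinned package are illustrative, not taken from this file):
#
#     from blues import python
#     python.pip('install', 'gunicorn==19.3.0')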
| gelbander/blues | blues/python.py | Python | mit | 997 | 0.002006 |
"""Class to reload platforms."""
from __future__ import annotations
import asyncio
from collections.abc import Iterable
import logging
from typing import Any
from homeassistant import config as conf_util
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.loader import async_get_integration
from homeassistant.setup import async_setup_component
from . import config_per_platform
from .entity_platform import EntityPlatform, async_get_platforms
from .typing import ConfigType
# mypy: disallow-any-generics
_LOGGER = logging.getLogger(__name__)
async def async_reload_integration_platforms(
hass: HomeAssistant, integration_name: str, integration_platforms: Iterable[str]
) -> None:
"""Reload an integration's platforms.
The platform must support being re-setup.
This functionality is only intended to be used for integrations that process
Home Assistant data and make this available to other integrations.
Examples are template, stats, derivative, utility meter.
"""
try:
unprocessed_conf = await conf_util.async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(err)
return
tasks = [
_resetup_platform(
hass, integration_name, integration_platform, unprocessed_conf
)
for integration_platform in integration_platforms
]
await asyncio.gather(*tasks)
async def _resetup_platform(
hass: HomeAssistant,
integration_name: str,
integration_platform: str,
unprocessed_conf: ConfigType,
) -> None:
"""Resetup a platform."""
integration = await async_get_integration(hass, integration_platform)
conf = await conf_util.async_process_component_config(
hass, unprocessed_conf, integration
)
if not conf:
return
root_config: dict[str, Any] = {integration_platform: []}
# Extract only the config for template, ignore the rest.
for p_type, p_config in config_per_platform(conf, integration_platform):
if p_type != integration_name:
continue
root_config[integration_platform].append(p_config)
component = integration.get_component()
if hasattr(component, "async_reset_platform"):
# If the integration has its own way to reset
# use this method.
await component.async_reset_platform(hass, integration_name)
await component.async_setup(hass, root_config)
return
# If it's an entity platform, we use the entity_platform
# async_reset method
platform = async_get_platform_without_config_entry(
hass, integration_name, integration_platform
)
if platform:
await _async_reconfig_platform(platform, root_config[integration_platform])
return
if not root_config[integration_platform]:
# No config for this platform
# and it's not loaded. Nothing to do.
return
await _async_setup_platform(
hass, integration_name, integration_platform, root_config[integration_platform]
)
async def _async_setup_platform(
hass: HomeAssistant,
integration_name: str,
integration_platform: str,
platform_configs: list[dict[str, Any]],
) -> None:
"""Platform for the first time when new configuration is added."""
if integration_platform not in hass.data:
await async_setup_component(
hass, integration_platform, {integration_platform: platform_configs}
)
return
entity_component = hass.data[integration_platform]
tasks = [
entity_component.async_setup_platform(integration_name, p_config)
for p_config in platform_configs
]
await asyncio.gather(*tasks)
async def _async_reconfig_platform(
platform: EntityPlatform, platform_configs: list[dict[str, Any]]
) -> None:
"""Reconfigure an already loaded platform."""
await platform.async_reset()
tasks = [platform.async_setup(p_config) for p_config in platform_configs]
await asyncio.gather(*tasks)
async def async_integration_yaml_config(
hass: HomeAssistant, integration_name: str
) -> ConfigType | None:
"""Fetch the latest yaml configuration for an integration."""
integration = await async_get_integration(hass, integration_name)
return await conf_util.async_process_component_config(
hass, await conf_util.async_hass_config_yaml(hass), integration
)
@callback
def async_get_platform_without_config_entry(
hass: HomeAssistant, integration_name: str, integration_platform_name: str
) -> EntityPlatform | None:
"""Find an existing platform that is not a config entry."""
for integration_platform in async_get_platforms(hass, integration_name):
if integration_platform.config_entry is not None:
continue
if integration_platform.domain == integration_platform_name:
platform: EntityPlatform = integration_platform
return platform
return None
async def async_setup_reload_service(
hass: HomeAssistant, domain: str, platforms: Iterable[str]
) -> None:
"""Create the reload service for the domain."""
if hass.services.has_service(domain, SERVICE_RELOAD):
return
async def _reload_config(call: Event) -> None:
"""Reload the platforms."""
await async_reload_integration_platforms(hass, domain, platforms)
hass.bus.async_fire(f"event_{domain}_reloaded", context=call.context)
hass.helpers.service.async_register_admin_service(
domain, SERVICE_RELOAD, _reload_config
)
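# A minimal sketch of how a platform normally wires this up so that the
# `<domain>.reload` service re-reads its YAML; DOMAIN and PLATFORMS stand in
# for the integration's own constants and are shown only for illustration.
#
#     async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
#         await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
#         ...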
def setup_reload_service(
hass: HomeAssistant, domain: str, platforms: Iterable[str]
) -> None:
"""Sync version of async_setup_reload_service."""
asyncio.run_coroutine_threadsafe(
async_setup_reload_service(hass, domain, platforms),
hass.loop,
).result()
| mezz64/home-assistant | homeassistant/helpers/reload.py | Python | apache-2.0 | 5,950 | 0.00084 |
# LayerMapping -- A Django Model/OGR Layer Mapping Utility
"""
The LayerMapping class provides a way to map the contents of OGR
vector files (e.g. SHP files) to Geographic-enabled Django models.
For more information, please consult the GeoDjango documentation:
https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/
"""
import sys
from decimal import Decimal, InvalidOperation as DecimalInvalidOperation
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.gdal import (
CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType,
SpatialReference,
)
from django.contrib.gis.gdal.field import (
OFTDate, OFTDateTime, OFTInteger, OFTReal, OFTString, OFTTime,
)
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import connections, models, router, transaction
from django.utils import six
from django.utils.encoding import force_text
# LayerMapping exceptions.
class LayerMapError(Exception):
pass
class InvalidString(LayerMapError):
pass
class InvalidDecimal(LayerMapError):
pass
class InvalidInteger(LayerMapError):
pass
class MissingForeignKey(LayerMapError):
pass
class LayerMapping(object):
"A class that maps OGR Layers to GeoDjango Models."
# Acceptable 'base' types for a multi-geometry type.
MULTI_TYPES = {1: OGRGeomType('MultiPoint'),
2: OGRGeomType('MultiLineString'),
3: OGRGeomType('MultiPolygon'),
OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'),
OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'),
OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'),
}
# Acceptable Django field types and corresponding acceptable OGR
# counterparts.
FIELD_TYPES = {
models.AutoField: OFTInteger,
models.IntegerField: (OFTInteger, OFTReal, OFTString),
models.FloatField: (OFTInteger, OFTReal),
models.DateField: OFTDate,
models.DateTimeField: OFTDateTime,
models.EmailField: OFTString,
models.TimeField: OFTTime,
models.DecimalField: (OFTInteger, OFTReal),
models.CharField: OFTString,
models.SlugField: OFTString,
models.TextField: OFTString,
models.URLField: OFTString,
models.BigIntegerField: (OFTInteger, OFTReal, OFTString),
models.SmallIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString),
}
def __init__(self, model, data, mapping, layer=0,
source_srs=None, encoding='utf-8',
transaction_mode='commit_on_success',
transform=True, unique=None, using=None):
"""
A LayerMapping object is initialized using the given Model (not an instance),
a DataSource (or string path to an OGR-supported data file), and a mapping
dictionary. See the module level docstring for more details and keyword
argument usage.
"""
# Getting the DataSource and the associated Layer.
if isinstance(data, six.string_types):
self.ds = DataSource(data, encoding=encoding)
else:
self.ds = data
self.layer = self.ds[layer]
self.using = using if using is not None else router.db_for_write(model)
self.spatial_backend = connections[self.using].ops
# Setting the mapping & model attributes.
self.mapping = mapping
self.model = model
# Checking the layer -- initialization of the object will fail if
# things don't check out before hand.
self.check_layer()
# Getting the geometry column associated with the model (an
# exception will be raised if there is no geometry column).
if connections[self.using].features.supports_transform:
self.geo_field = self.geometry_field()
else:
transform = False
# Checking the source spatial reference system, and getting
# the coordinate transformation object (unless the `transform`
# keyword is set to False)
if transform:
self.source_srs = self.check_srs(source_srs)
self.transform = self.coord_transform()
else:
self.transform = transform
# Setting the encoding for OFTString fields, if specified.
if encoding:
# Making sure the encoding exists, if not a LookupError
# exception will be thrown.
from codecs import lookup
lookup(encoding)
self.encoding = encoding
else:
self.encoding = None
if unique:
self.check_unique(unique)
transaction_mode = 'autocommit' # Has to be set to autocommit.
self.unique = unique
else:
self.unique = None
# Setting the transaction decorator with the function in the
# transaction modes dictionary.
self.transaction_mode = transaction_mode
if transaction_mode == 'autocommit':
self.transaction_decorator = None
elif transaction_mode == 'commit_on_success':
self.transaction_decorator = transaction.atomic
else:
raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode)
# #### Checking routines used during initialization ####
def check_fid_range(self, fid_range):
"This checks the `fid_range` keyword."
if fid_range:
if isinstance(fid_range, (tuple, list)):
return slice(*fid_range)
elif isinstance(fid_range, slice):
return fid_range
else:
raise TypeError
else:
return None
def check_layer(self):
"""
This checks the Layer metadata, and ensures that it is compatible
with the mapping information and model. Unlike previous revisions,
there is no need to increment through each feature in the Layer.
"""
# The geometry field of the model is set here.
# TODO: Support more than one geometry field / model. However, this
# depends on the GDAL Driver in use.
self.geom_field = False
self.fields = {}
# Getting lists of the field names and the field types available in
# the OGR Layer.
ogr_fields = self.layer.fields
ogr_field_types = self.layer.field_types
# Function for determining if the OGR mapping field is in the Layer.
def check_ogr_fld(ogr_map_fld):
try:
idx = ogr_fields.index(ogr_map_fld)
except ValueError:
raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld)
return idx
# No need to increment through each feature in the model, simply check
# the Layer metadata against what was given in the mapping dictionary.
for field_name, ogr_name in self.mapping.items():
# Ensuring that a corresponding field exists in the model
# for the given field name in the mapping.
try:
model_field = self.model._meta.get_field(field_name)
except FieldDoesNotExist:
raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name)
# Getting the string name for the Django field class (e.g., 'PointField').
fld_name = model_field.__class__.__name__
if isinstance(model_field, GeometryField):
if self.geom_field:
raise LayerMapError('LayerMapping does not support more than one GeometryField per model.')
# Getting the coordinate dimension of the geometry field.
coord_dim = model_field.dim
try:
if coord_dim == 3:
gtype = OGRGeomType(ogr_name + '25D')
else:
gtype = OGRGeomType(ogr_name)
except GDALException:
raise LayerMapError('Invalid mapping for GeometryField "%s".' % field_name)
# Making sure that the OGR Layer's Geometry is compatible.
ltype = self.layer.geom_type
if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)):
raise LayerMapError('Invalid mapping geometry; model has %s%s, '
'layer geometry type is %s.' %
(fld_name, '(dim=3)' if coord_dim == 3 else '', ltype))
# Setting the `geom_field` attribute w/the name of the model field
# that is a Geometry. Also setting the coordinate dimension
# attribute.
self.geom_field = field_name
self.coord_dim = coord_dim
fields_val = model_field
elif isinstance(model_field, models.ForeignKey):
if isinstance(ogr_name, dict):
# Is every given related model mapping field in the Layer?
rel_model = model_field.remote_field.model
for rel_name, ogr_field in ogr_name.items():
idx = check_ogr_fld(ogr_field)
try:
rel_model._meta.get_field(rel_name)
except FieldDoesNotExist:
raise LayerMapError('ForeignKey mapping field "%s" not in %s fields.' %
(rel_name, rel_model.__class__.__name__))
fields_val = rel_model
else:
raise TypeError('ForeignKey mapping must be of dictionary type.')
else:
# Is the model field type supported by LayerMapping?
if model_field.__class__ not in self.FIELD_TYPES:
raise LayerMapError('Django field type "%s" has no OGR mapping (yet).' % fld_name)
# Is the OGR field in the Layer?
idx = check_ogr_fld(ogr_name)
ogr_field = ogr_field_types[idx]
# Can the OGR field type be mapped to the Django field type?
if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]):
raise LayerMapError('OGR field "%s" (of type %s) cannot be mapped to Django %s.' %
(ogr_field, ogr_field.__name__, fld_name))
fields_val = model_field
self.fields[field_name] = fields_val
def check_srs(self, source_srs):
"Checks the compatibility of the given spatial reference object."
if isinstance(source_srs, SpatialReference):
sr = source_srs
elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()):
sr = source_srs.srs
elif isinstance(source_srs, (int, six.string_types)):
sr = SpatialReference(source_srs)
else:
# Otherwise just pulling the SpatialReference from the layer
sr = self.layer.srs
if not sr:
raise LayerMapError('No source reference system defined.')
else:
return sr
def check_unique(self, unique):
"Checks the `unique` keyword parameter -- may be a sequence or string."
if isinstance(unique, (list, tuple)):
# List of fields to determine uniqueness with
for attr in unique:
if attr not in self.mapping:
raise ValueError
elif isinstance(unique, six.string_types):
# Only a single field passed in.
if unique not in self.mapping:
raise ValueError
else:
raise TypeError('Unique keyword argument must be set with a tuple, list, or string.')
# Keyword argument retrieval routines ####
def feature_kwargs(self, feat):
"""
Given an OGR Feature, this will return a dictionary of keyword arguments
for constructing the mapped model.
"""
# The keyword arguments for model construction.
kwargs = {}
# Incrementing through each model field and OGR field in the
# dictionary mapping.
for field_name, ogr_name in self.mapping.items():
model_field = self.fields[field_name]
if isinstance(model_field, GeometryField):
# Verify OGR geometry.
try:
val = self.verify_geom(feat.geom, model_field)
except GDALException:
raise LayerMapError('Could not retrieve geometry from feature.')
elif isinstance(model_field, models.base.ModelBase):
# The related _model_, not a field was passed in -- indicating
# another mapping for the related Model.
val = self.verify_fk(feat, model_field, ogr_name)
else:
# Otherwise, verify OGR Field type.
val = self.verify_ogr_field(feat[ogr_name], model_field)
# Setting the keyword arguments for the field name with the
# value obtained above.
kwargs[field_name] = val
return kwargs
def unique_kwargs(self, kwargs):
"""
Given the feature keyword arguments (from `feature_kwargs`) this routine
will construct and return the uniqueness keyword arguments -- a subset
of the feature kwargs.
"""
if isinstance(self.unique, six.string_types):
return {self.unique: kwargs[self.unique]}
else:
return {fld: kwargs[fld] for fld in self.unique}
# #### Verification routines used in constructing model keyword arguments. ####
def verify_ogr_field(self, ogr_field, model_field):
"""
Verifies if the OGR Field contents are acceptable to the Django
model field. If they are, the verified value is returned,
otherwise the proper exception is raised.
"""
if (isinstance(ogr_field, OFTString) and
isinstance(model_field, (models.CharField, models.TextField))):
if self.encoding:
# The encoding for OGR data sources may be specified here
# (e.g., 'cp437' for Census Bureau boundary files).
val = force_text(ogr_field.value, self.encoding)
else:
val = ogr_field.value
if model_field.max_length and len(val) > model_field.max_length:
raise InvalidString('%s model field maximum string length is %s, given %s characters.' %
(model_field.name, model_field.max_length, len(val)))
elif isinstance(ogr_field, OFTReal) and isinstance(model_field, models.DecimalField):
try:
# Creating an instance of the Decimal value to use.
d = Decimal(str(ogr_field.value))
except DecimalInvalidOperation:
raise InvalidDecimal('Could not construct decimal from: %s' % ogr_field.value)
# Getting the decimal value as a tuple.
dtup = d.as_tuple()
digits = dtup[1]
d_idx = dtup[2] # index where the decimal is
# Maximum amount of precision, or digits to the left of the decimal.
max_prec = model_field.max_digits - model_field.decimal_places
# Getting the digits to the left of the decimal place for the
# given decimal.
if d_idx < 0:
n_prec = len(digits[:d_idx])
else:
n_prec = len(digits) + d_idx
# If we have more than the maximum digits allowed, then throw an
# InvalidDecimal exception.
if n_prec > max_prec:
raise InvalidDecimal(
'A DecimalField with max_digits %d, decimal_places %d must '
'round to an absolute value less than 10^%d.' %
(model_field.max_digits, model_field.decimal_places, max_prec)
)
val = d
elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField):
# Attempt to convert any OFTReal and OFTString value to an OFTInteger.
try:
val = int(ogr_field.value)
except ValueError:
raise InvalidInteger('Could not construct integer from: %s' % ogr_field.value)
else:
val = ogr_field.value
return val
def verify_fk(self, feat, rel_model, rel_mapping):
"""
Given an OGR Feature, the related model and its dictionary mapping,
this routine will retrieve the related model for the ForeignKey
mapping.
"""
# TODO: It is expensive to retrieve a model for every record --
# explore if an efficient mechanism exists for caching related
# ForeignKey models.
# Constructing and verifying the related model keyword arguments.
fk_kwargs = {}
for field_name, ogr_name in rel_mapping.items():
fk_kwargs[field_name] = self.verify_ogr_field(feat[ogr_name], rel_model._meta.get_field(field_name))
# Attempting to retrieve and return the related model.
try:
return rel_model.objects.using(self.using).get(**fk_kwargs)
except ObjectDoesNotExist:
raise MissingForeignKey(
'No ForeignKey %s model found with keyword arguments: %s' %
(rel_model.__name__, fk_kwargs)
)
def verify_geom(self, geom, model_field):
"""
Verifies the geometry -- will construct and return a GeometryCollection
if necessary (for example if the model field is MultiPolygonField while
the mapped shapefile only contains Polygons).
"""
# Downgrade a 3D geom to a 2D one, if necessary.
if self.coord_dim != geom.coord_dim:
geom.coord_dim = self.coord_dim
if self.make_multi(geom.geom_type, model_field):
# Constructing a multi-geometry type to contain the single geometry
multi_type = self.MULTI_TYPES[geom.geom_type.num]
g = OGRGeometry(multi_type)
g.add(geom)
else:
g = geom
# Transforming the geometry with our Coordinate Transformation object,
# but only if the class variable `transform` is set w/a CoordTransform
# object.
if self.transform:
g.transform(self.transform)
# Returning the WKT of the geometry.
return g.wkt
# #### Other model methods ####
def coord_transform(self):
"Returns the coordinate transformation object."
SpatialRefSys = self.spatial_backend.spatial_ref_sys()
try:
# Getting the target spatial reference system
target_srs = SpatialRefSys.objects.using(self.using).get(srid=self.geo_field.srid).srs
# Creating the CoordTransform object
return CoordTransform(self.source_srs, target_srs)
except Exception as msg:
new_msg = 'Could not translate between the data source and model geometry: %s' % msg
six.reraise(LayerMapError, LayerMapError(new_msg), sys.exc_info()[2])
def geometry_field(self):
"Returns the GeometryField instance associated with the geographic column."
# Use `get_field()` on the model's options so that we
# get the correct field instance if there's model inheritance.
opts = self.model._meta
return opts.get_field(self.geom_field)
def make_multi(self, geom_type, model_field):
"""
Given the OGRGeomType for a geometry and its associated GeometryField,
determine whether the geometry should be turned into a GeometryCollection.
"""
return (geom_type.num in self.MULTI_TYPES and
model_field.__class__.__name__ == 'Multi%s' % geom_type.django)
def save(self, verbose=False, fid_range=False, step=False,
progress=False, silent=False, stream=sys.stdout, strict=False):
"""
Saves the contents from the OGR DataSource Layer into the database
according to the mapping dictionary given at initialization.
Keyword Parameters:
verbose:
If set, information will be printed subsequent to each model save
executed on the database.
fid_range:
May be set with a slice or tuple of (begin, end) feature ID's to map
from the data source. In other words, this keyword enables the user
to selectively import a subset range of features in the geographic
data source.
step:
If set with an integer, transactions will occur at every step
interval. For example, if step=1000, a commit would occur after
the 1,000th feature, the 2,000th feature etc.
progress:
When this keyword is set, status information will be printed giving
the number of features processed and successfully saved. By default,
        progress information will be printed every 1000 features processed,
however, this default may be overridden by setting this keyword with an
integer for the desired interval.
stream:
Status information will be written to this file handle. Defaults to
using `sys.stdout`, but any object with a `write` method is supported.
silent:
By default, non-fatal error notifications are printed to stdout, but
this keyword may be set to disable these notifications.
strict:
Execution of the model mapping will cease upon the first error
encountered. The default behavior is to attempt to continue.
"""
# Getting the default Feature ID range.
default_range = self.check_fid_range(fid_range)
# Setting the progress interval, if requested.
if progress:
if progress is True or not isinstance(progress, int):
progress_interval = 1000
else:
progress_interval = progress
def _save(feat_range=default_range, num_feat=0, num_saved=0):
if feat_range:
layer_iter = self.layer[feat_range]
else:
layer_iter = self.layer
for feat in layer_iter:
num_feat += 1
# Getting the keyword arguments
try:
kwargs = self.feature_kwargs(feat)
except LayerMapError as msg:
# Something borked the validation
if strict:
raise
elif not silent:
stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
else:
# Constructing the model using the keyword args
is_update = False
if self.unique:
# If we want unique models on a particular field, handle the
# geometry appropriately.
try:
# Getting the keyword arguments and retrieving
# the unique model.
u_kwargs = self.unique_kwargs(kwargs)
m = self.model.objects.using(self.using).get(**u_kwargs)
is_update = True
# Getting the geometry (in OGR form), creating
# one from the kwargs WKT, adding in additional
# geometries, and update the attribute with the
# just-updated geometry WKT.
geom = getattr(m, self.geom_field).ogr
new = OGRGeometry(kwargs[self.geom_field])
for g in new:
geom.add(g)
setattr(m, self.geom_field, geom.wkt)
except ObjectDoesNotExist:
# No unique model exists yet, create.
m = self.model(**kwargs)
else:
m = self.model(**kwargs)
try:
# Attempting to save.
m.save(using=self.using)
num_saved += 1
if verbose:
stream.write('%s: %s\n' % ('Updated' if is_update else 'Saved', m))
except Exception as msg:
if strict:
# Bailing out if the `strict` keyword is set.
if not silent:
stream.write(
'Failed to save the feature (id: %s) into the '
'model with the keyword arguments:\n' % feat.fid
)
stream.write('%s\n' % kwargs)
raise
elif not silent:
stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))
# Printing progress information, if requested.
if progress and num_feat % progress_interval == 0:
stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))
# Only used for status output purposes -- incremental saving uses the
# values returned here.
return num_saved, num_feat
if self.transaction_decorator is not None:
_save = self.transaction_decorator(_save)
nfeat = self.layer.num_feat
if step and isinstance(step, int) and step < nfeat:
# Incremental saving is requested at the given interval (step)
if default_range:
raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
beg, num_feat, num_saved = (0, 0, 0)
indices = range(step, nfeat, step)
n_i = len(indices)
for i, end in enumerate(indices):
# Constructing the slice to use for this step; the last slice is
# special (e.g, [100:] instead of [90:100]).
if i + 1 == n_i:
step_slice = slice(beg, None)
else:
step_slice = slice(beg, end)
try:
num_feat, num_saved = _save(step_slice, num_feat, num_saved)
beg = end
except Exception: # Deliberately catch everything
stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
raise
else:
# Otherwise, just calling the previously defined _save() function.
_save()
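# A sketch of incremental saving with the options documented above; the model,
# mapping dictionary and unique field name are hypothetical.
#
#     lm = LayerMapping(County, '/path/to/counties.shp', county_mapping, unique='fips')
#     lm.save(step=500, progress=True, strict=True)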
| jsoref/django | django/contrib/gis/utils/layermapping.py | Python | bsd-3-clause | 27,310 | 0.00216 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import organizations
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
readme = open('README.rst').read()
history = open('HISTORY.rst').read().replace('.. :changelog:', '')
setup(
author="Ben Lopatin",
author_email="[email protected]",
name='django-organizations',
version=organizations.__version__,
description='Group accounts for Django',
long_description=readme + '\n\n' + history,
url='https://github.com/bennylope/django-organizations/',
license='BSD License',
platforms=['OS Independent'],
packages=[
'organizations',
'organizations.backends',
'organizations.south_migrations',
'organizations.templatetags',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
install_requires=[
'Django>=1.4.2',
],
test_suite='tests',
include_package_data=True,
zip_safe=False,
)
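# The shortcut handled at the top of this file is invoked as:
#
#     python setup.py publish
#
# which uploads an sdist and a wheel and then exits.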
| bennylope/django-site-broadcasts | test.py | Python | mit | 1,601 | 0 |
# -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
from flask import url_for
from recruit_app.user.models import User
from .factories import UserFactory
class TestLoggingIn:
"""Login."""
def test_can_log_in_returns_200(self, user, testapp):
"""Login successful."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'myprecious'
# Submits
# res = form.submit().follow()
res = form.submit()
assert res.status_code == 200
def test_sees_alert_on_log_out(self, user, testapp):
"""Show alert on logout."""
res = testapp.get('/')
# Fills out login form in navbar
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'myprecious'
# Submits
res = form.submit()
res = testapp.get(url_for('security.logout')).follow()
# sees alert
assert 'loginForm' in res
def test_sees_error_message_if_password_is_incorrect(self, user, testapp):
"""Show error if password is incorrect."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['email'] = user.email
form['password'] = 'wrong'
# Submits
res = form.submit()
# sees error
assert 'Invalid password' in res
def test_sees_error_message_if_email_doesnt_exist(self, user, testapp):
"""Show error if email doesn't exist."""
# Goes to homepage
res = testapp.get('/')
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['email'] = '[email protected]'
form['password'] = 'myprecious'
# Submits
res = form.submit()
# sees error
assert 'Specified user does not exist' in res
class TestRegistering:
"""Register a user."""
def test_can_register(self, user, testapp):
"""Register a new user."""
old_count = len(User.query.all())
# Goes to homepage
res = testapp.get('/')
# Clicks Create Account button
res = res.click('Create account')
# Fills out the form
form = res.forms['registerForm']
form['email'] = '[email protected]'
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
# res = form.submit().follow()
res = form.submit().follow()
assert res.status_code == 200
# A new user was created
assert len(User.query.all()) == old_count + 1
def test_sees_error_message_if_passwords_dont_match(self, user, testapp):
"""Show error if passwords don't match."""
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but passwords don't match
form = res.forms['registerForm']
form['email'] = '[email protected]'
form['password'] = 'secret'
form['password_confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert 'Passwords do not match' in res
def test_sees_error_message_if_user_already_registered(self, user, testapp):
"""Show error if user already registered."""
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but email is already registered
form = res.forms['registerForm']
form['email'] = user.email
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert 'is already associated with an account' in res
| tyler274/Recruitment-App | tests/test_functional.py | Python | bsd-3-clause | 3,980 | 0.000251 |
import sklearn.neural_network
class NeuralNet:
    """Thin wrapper around scikit-learn's MLPRegressor."""
def __init__(self):
self.regression = sklearn.neural_network.MLPRegressor(hidden_layer_sizes=100)
def train(self, X, Y):
self.regression.fit(X, Y)
def score(self, X):
return self.regression.predict(X)
    def set_param(self, param):
        # set_params expects keyword arguments, so unpack the parameter dict.
        self.regression.set_params(**param)
def get_param(self):
return self.regression.get_params() | m5w/matxin-lineariser | matxin_lineariser/statistical_linearisation/Linearisation/NeuralNet.py | Python | gpl-3.0 | 431 | 0.006961 |
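# A minimal usage sketch of the wrapper above, with made-up data:
#
#     import numpy as np
#     net = NeuralNet()
#     X = np.random.rand(100, 4)
#     Y = np.random.rand(100)
#     net.train(X, Y)
#     predictions = net.score(X)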
"""
Acceptance tests for Home Page (My Courses / My Libraries).
"""
from bok_choy.web_app_test import WebAppTest
from opaque_keys.edx.locator import LibraryLocator
from ...fixtures import PROGRAMS_STUB_URL
from ...fixtures.config import ConfigModelFixture
from ...fixtures.programs import ProgramsFixture
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.library import LibraryEditPage
from ...pages.studio.index import DashboardPage, DashboardPageWithPrograms
class CreateLibraryTest(WebAppTest):
"""
Test that we can create a new content library on the studio home page.
"""
def setUp(self):
"""
Load the helper for the home page (dashboard page)
"""
super(CreateLibraryTest, self).setUp()
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPage(self.browser)
def test_create_library(self):
"""
From the home page:
Click "New Library"
Fill out the form
Submit the form
We should be redirected to the edit view for the library
Return to the home page
The newly created library should now appear in the list of libraries
"""
name = "New Library Name"
org = "TestOrgX"
number = "TESTLIB"
self.auth_page.visit()
self.dashboard_page.visit()
self.assertFalse(self.dashboard_page.has_library(name=name, org=org, number=number))
self.assertTrue(self.dashboard_page.has_new_library_button())
self.dashboard_page.click_new_library()
self.assertTrue(self.dashboard_page.is_new_library_form_visible())
self.dashboard_page.fill_new_library_form(name, org, number)
self.assertTrue(self.dashboard_page.is_new_library_form_valid())
self.dashboard_page.submit_new_library_form()
# The next page is the library edit view; make sure it loads:
lib_page = LibraryEditPage(self.browser, LibraryLocator(org, number))
lib_page.wait_for_page()
# Then go back to the home page and make sure the new library is listed there:
self.dashboard_page.visit()
self.assertTrue(self.dashboard_page.has_library(name=name, org=org, number=number))
class DashboardProgramsTabTest(WebAppTest):
"""
Test the programs tab on the studio home page.
"""
def setUp(self):
super(DashboardProgramsTabTest, self).setUp()
ProgramsFixture().install_programs([])
self.auth_page = AutoAuthPage(self.browser, staff=True)
self.dashboard_page = DashboardPageWithPrograms(self.browser)
self.auth_page.visit()
def set_programs_api_configuration(self, is_enabled=False, api_version=1, api_url=PROGRAMS_STUB_URL,
js_path='/js', css_path='/css'):
"""
Dynamically adjusts the programs API config model during tests.
"""
ConfigModelFixture('/config/programs', {
'enabled': is_enabled,
'enable_studio_tab': is_enabled,
'enable_student_dashboard': is_enabled,
'api_version_number': api_version,
'internal_service_url': api_url,
'public_service_url': api_url,
'authoring_app_js_path': js_path,
'authoring_app_css_path': css_path,
'cache_ttl': 0
}).install()
def test_tab_is_disabled(self):
"""
The programs tab and "new program" button should not appear at all
unless enabled via the config model.
"""
self.set_programs_api_configuration()
self.dashboard_page.visit()
self.assertFalse(self.dashboard_page.is_programs_tab_present())
self.assertFalse(self.dashboard_page.is_new_program_button_present())
def test_tab_is_enabled_with_empty_list(self):
"""
The programs tab and "new program" button should appear when enabled
via config. When the programs list is empty, a button should appear
that allows creating a new program.
"""
self.set_programs_api_configuration(True)
self.dashboard_page.visit()
self.assertTrue(self.dashboard_page.is_programs_tab_present())
self.assertTrue(self.dashboard_page.is_new_program_button_present())
results = self.dashboard_page.get_program_list()
self.assertEqual(results, [])
self.assertTrue(self.dashboard_page.is_empty_list_create_button_present())
def test_tab_is_enabled_with_nonempty_list(self):
"""
The programs tab and "new program" button should appear when enabled
via config, and the results of the program list should display when
the list is nonempty.
"""
test_program_values = [('first program', 'org1'), ('second program', 'org2')]
ProgramsFixture().install_programs(test_program_values)
self.set_programs_api_configuration(True)
self.dashboard_page.visit()
self.assertTrue(self.dashboard_page.is_programs_tab_present())
self.assertTrue(self.dashboard_page.is_new_program_button_present())
results = self.dashboard_page.get_program_list()
self.assertEqual(results, test_program_values)
self.assertFalse(self.dashboard_page.is_empty_list_create_button_present())
def test_tab_requires_staff(self):
"""
The programs tab and "new program" button will not be available, even
when enabled via config, if the user is not global staff.
"""
self.set_programs_api_configuration(True)
AutoAuthPage(self.browser, staff=False).visit()
self.dashboard_page.visit()
self.assertFalse(self.dashboard_page.is_programs_tab_present())
self.assertFalse(self.dashboard_page.is_new_program_button_present())
| IndonesiaX/edx-platform | common/test/acceptance/tests/studio/test_studio_home.py | Python | agpl-3.0 | 5,862 | 0.001365 |
#!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Burnin program
"""
import sys
import optparse
import time
import socket
import urllib
from itertools import izip, islice, cycle
from cStringIO import StringIO
from ganeti import opcodes
from ganeti import constants
from ganeti import cli
from ganeti import errors
from ganeti import utils
from ganeti import hypervisor
from ganeti import compat
from ganeti import pathutils
from ganeti.confd import client as confd_client
USAGE = ("\tburnin -o OS_NAME [options...] instance_name ...")
MAX_RETRIES = 3
LOG_HEADERS = {
0: "- ",
1: "* ",
2: "",
}
#: Disk templates supporting a single node
_SINGLE_NODE_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_PLAIN,
constants.DT_FILE,
constants.DT_SHARED_FILE,
constants.DT_EXT,
constants.DT_RBD,
])
_SUPPORTED_DISK_TEMPLATES = compat.UniqueFrozenset([
constants.DT_DISKLESS,
constants.DT_DRBD8,
constants.DT_EXT,
constants.DT_FILE,
constants.DT_PLAIN,
constants.DT_RBD,
constants.DT_SHARED_FILE,
])
#: Disk templates for which import/export is tested
_IMPEXP_DISK_TEMPLATES = (_SUPPORTED_DISK_TEMPLATES - frozenset([
constants.DT_DISKLESS,
constants.DT_FILE,
constants.DT_SHARED_FILE,
]))
class InstanceDown(Exception):
"""The checked instance was not up"""
class BurninFailure(Exception):
"""Failure detected during burning"""
def Usage():
"""Shows program usage information and exits the program."""
print >> sys.stderr, "Usage:"
print >> sys.stderr, USAGE
sys.exit(2)
def Log(msg, *args, **kwargs):
"""Simple function that prints out its argument.
"""
if args:
msg = msg % args
indent = kwargs.get("indent", 0)
sys.stdout.write("%*s%s%s\n" % (2 * indent, "",
LOG_HEADERS.get(indent, " "), msg))
sys.stdout.flush()
def Err(msg, exit_code=1):
"""Simple error logging that prints to stderr.
"""
sys.stderr.write(msg + "\n")
sys.stderr.flush()
sys.exit(exit_code)
class SimpleOpener(urllib.FancyURLopener):
"""A simple url opener"""
# pylint: disable=W0221
def prompt_user_passwd(self, host, realm, clear_cache=0):
"""No-interaction version of prompt_user_passwd."""
# we follow parent class' API
# pylint: disable=W0613
return None, None
def http_error_default(self, url, fp, errcode, errmsg, headers):
"""Custom error handling"""
# make sure sockets are not left in CLOSE_WAIT, this is similar
# but with a different exception to the BasicURLOpener class
_ = fp.read() # throw away data
fp.close()
raise InstanceDown("HTTP error returned: code %s, msg %s" %
(errcode, errmsg))
OPTIONS = [
cli.cli_option("-o", "--os", dest="os", default=None,
help="OS to use during burnin",
metavar="<OS>",
completion_suggest=cli.OPT_COMPL_ONE_OS),
cli.HYPERVISOR_OPT,
cli.OSPARAMS_OPT,
cli.cli_option("--disk-size", dest="disk_size",
help="Disk size (determines disk count)",
default="128m", type="string", metavar="<size,size,...>",
completion_suggest=("128M 512M 1G 4G 1G,256M"
" 4G,1G,1G 10G").split()),
cli.cli_option("--disk-growth", dest="disk_growth", help="Disk growth",
default="128m", type="string", metavar="<size,size,...>"),
cli.cli_option("--mem-size", dest="mem_size", help="Memory size",
default=None, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--maxmem-size", dest="maxmem_size", help="Max Memory size",
default=256, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--minmem-size", dest="minmem_size", help="Min Memory size",
default=128, type="unit", metavar="<size>",
completion_suggest=("128M 256M 512M 1G 4G 8G"
" 12G 16G").split()),
cli.cli_option("--vcpu-count", dest="vcpu_count", help="VCPU count",
default=3, type="unit", metavar="<count>",
completion_suggest=("1 2 3 4").split()),
cli.DEBUG_OPT,
cli.VERBOSE_OPT,
cli.NOIPCHECK_OPT,
cli.NONAMECHECK_OPT,
cli.EARLY_RELEASE_OPT,
cli.cli_option("--no-replace1", dest="do_replace1",
help="Skip disk replacement with the same secondary",
action="store_false", default=True),
cli.cli_option("--no-replace2", dest="do_replace2",
help="Skip disk replacement with a different secondary",
action="store_false", default=True),
cli.cli_option("--no-failover", dest="do_failover",
help="Skip instance failovers", action="store_false",
default=True),
cli.cli_option("--no-migrate", dest="do_migrate",
help="Skip instance live migration",
action="store_false", default=True),
cli.cli_option("--no-move", dest="do_move",
help="Skip instance moves", action="store_false",
default=True),
cli.cli_option("--no-importexport", dest="do_importexport",
help="Skip instance export/import", action="store_false",
default=True),
cli.cli_option("--no-startstop", dest="do_startstop",
help="Skip instance stop/start", action="store_false",
default=True),
cli.cli_option("--no-reinstall", dest="do_reinstall",
help="Skip instance reinstall", action="store_false",
default=True),
cli.cli_option("--no-reboot", dest="do_reboot",
help="Skip instance reboot", action="store_false",
default=True),
cli.cli_option("--no-renamesame", dest="do_renamesame",
help="Skip instance rename to same name", action="store_false",
default=True),
cli.cli_option("--reboot-types", dest="reboot_types",
help="Specify the reboot types", default=None),
cli.cli_option("--no-activate-disks", dest="do_activate_disks",
help="Skip disk activation/deactivation",
action="store_false", default=True),
cli.cli_option("--no-add-disks", dest="do_addremove_disks",
help="Skip disk addition/removal",
action="store_false", default=True),
cli.cli_option("--no-add-nics", dest="do_addremove_nics",
help="Skip NIC addition/removal",
action="store_false", default=True),
cli.cli_option("--no-nics", dest="nics",
help="No network interfaces", action="store_const",
const=[], default=[{}]),
cli.cli_option("--no-confd", dest="do_confd_tests",
help="Skip confd queries",
action="store_false", default=constants.ENABLE_CONFD),
cli.cli_option("--rename", dest="rename", default=None,
help=("Give one unused instance name which is taken"
" to start the renaming sequence"),
metavar="<instance_name>"),
cli.cli_option("-t", "--disk-template", dest="disk_template",
choices=list(_SUPPORTED_DISK_TEMPLATES),
default=constants.DT_DRBD8,
help=("Disk template (default %s, otherwise one of %s)" %
(constants.DT_DRBD8,
utils.CommaJoin(_SUPPORTED_DISK_TEMPLATES)))),
cli.cli_option("-n", "--nodes", dest="nodes", default="",
help=("Comma separated list of nodes to perform"
" the burnin on (defaults to all nodes)"),
completion_suggest=cli.OPT_COMPL_MANY_NODES),
cli.cli_option("-I", "--iallocator", dest="iallocator",
default=None, type="string",
help=("Perform the allocation using an iallocator"
" instead of fixed node spread (node restrictions no"
" longer apply, therefore -n/--nodes must not be"
" used"),
completion_suggest=cli.OPT_COMPL_ONE_IALLOCATOR),
cli.cli_option("-p", "--parallel", default=False, action="store_true",
dest="parallel",
help=("Enable parallelization of some operations in"
" order to speed burnin or to test granular locking")),
cli.cli_option("--net-timeout", default=15, type="int",
dest="net_timeout",
help=("The instance check network timeout in seconds"
" (defaults to 15 seconds)"),
completion_suggest="15 60 300 900".split()),
cli.cli_option("-C", "--http-check", default=False, action="store_true",
dest="http_check",
help=("Enable checking of instance status via http,"
" looking for /hostname.txt that should contain the"
" name of the instance")),
cli.cli_option("-K", "--keep-instances", default=False,
action="store_true",
dest="keep_instances",
help=("Leave instances on the cluster after burnin,"
" for investigation in case of errors or simply"
" to use them")),
cli.REASON_OPT,
]
# Mainly used for bash completion
ARGUMENTS = [cli.ArgInstance(min=1)]
def _DoCheckInstances(fn):
"""Decorator for checking instances.
"""
def wrapper(self, *args, **kwargs):
val = fn(self, *args, **kwargs)
for instance in self.instances:
self._CheckInstanceAlive(instance) # pylint: disable=W0212
return val
return wrapper
def _DoBatch(retry):
"""Decorator for possible batch operations.
Must come after the _DoCheckInstances decorator (if any).
@param retry: whether this is a retryable batch, will be
passed to StartBatch
"""
def wrap(fn):
def batched(self, *args, **kwargs):
self.StartBatch(retry)
val = fn(self, *args, **kwargs)
self.CommitQueue()
return val
return batched
return wrap
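# A sketch of how the two decorators above are meant to be combined on Burner
# methods (the method name is illustrative): batch the queued opcodes, then
# verify that every instance is still alive.
#
#   @_DoCheckInstances
#   @_DoBatch(False)
#   def BurnSomeOperation(self):
#     ...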
class Burner(object):
"""Burner class."""
def __init__(self):
"""Constructor."""
self.url_opener = SimpleOpener()
self._feed_buf = StringIO()
self.nodes = []
self.instances = []
self.to_rem = []
self.queued_ops = []
self.opts = None
self.queue_retry = False
self.disk_count = self.disk_growth = self.disk_size = None
self.hvp = self.bep = None
self.ParseOptions()
self.cl = cli.GetClient()
self.GetState()
def ClearFeedbackBuf(self):
"""Clear the feedback buffer."""
self._feed_buf.truncate(0)
def GetFeedbackBuf(self):
"""Return the contents of the buffer."""
return self._feed_buf.getvalue()
def Feedback(self, msg):
"""Acumulate feedback in our buffer."""
formatted_msg = "%s %s" % (time.ctime(utils.MergeTime(msg[0])), msg[2])
self._feed_buf.write(formatted_msg + "\n")
if self.opts.verbose:
Log(formatted_msg, indent=3)
def MaybeRetry(self, retry_count, msg, fn, *args):
"""Possibly retry a given function execution.
@type retry_count: int
@param retry_count: retry counter:
- 0: non-retryable action
- 1: last retry for a retryable action
- MAX_RETRIES: original try for a retryable action
@type msg: str
@param msg: the kind of the operation
@type fn: callable
@param fn: the function to be called
"""
try:
val = fn(*args)
if retry_count > 0 and retry_count < MAX_RETRIES:
Log("Idempotent %s succeeded after %d retries",
msg, MAX_RETRIES - retry_count)
return val
except Exception, err: # pylint: disable=W0703
if retry_count == 0:
Log("Non-idempotent %s failed, aborting", msg)
raise
elif retry_count == 1:
Log("Idempotent %s repeated failure, aborting", msg)
raise
else:
Log("Idempotent %s failed, retry #%d/%d: %s",
msg, MAX_RETRIES - retry_count + 1, MAX_RETRIES, err)
self.MaybeRetry(retry_count - 1, msg, fn, *args)
def _ExecOp(self, *ops):
"""Execute one or more opcodes and manage the exec buffer.
@return: if only opcode has been passed, we return its result;
otherwise we return the list of results
"""
job_id = cli.SendJob(ops, cl=self.cl)
results = cli.PollJob(job_id, cl=self.cl, feedback_fn=self.Feedback)
if len(ops) == 1:
return results[0]
else:
return results
def ExecOp(self, retry, *ops):
"""Execute one or more opcodes and manage the exec buffer.
@return: if only opcode has been passed, we return its result;
otherwise we return the list of results
"""
if retry:
rval = MAX_RETRIES
else:
rval = 0
cli.SetGenericOpcodeOpts(ops, self.opts)
return self.MaybeRetry(rval, "opcode", self._ExecOp, *ops)
def ExecOrQueue(self, name, ops, post_process=None):
"""Execute an opcode and manage the exec buffer."""
if self.opts.parallel:
cli.SetGenericOpcodeOpts(ops, self.opts)
self.queued_ops.append((ops, name, post_process))
else:
val = self.ExecOp(self.queue_retry, *ops) # pylint: disable=W0142
if post_process is not None:
post_process()
return val
def StartBatch(self, retry):
"""Start a new batch of jobs.
@param retry: whether this is a retryable batch
"""
self.queued_ops = []
self.queue_retry = retry
def CommitQueue(self):
"""Execute all submitted opcodes in case of parallel burnin"""
if not self.opts.parallel or not self.queued_ops:
return
if self.queue_retry:
rval = MAX_RETRIES
else:
rval = 0
try:
results = self.MaybeRetry(rval, "jobset", self.ExecJobSet,
self.queued_ops)
finally:
self.queued_ops = []
return results
def ExecJobSet(self, jobs):
"""Execute a set of jobs and return once all are done.
The method will return the list of results, if all jobs are
successful. Otherwise, OpExecError will be raised from within
cli.py.
"""
self.ClearFeedbackBuf()
jex = cli.JobExecutor(cl=self.cl, feedback_fn=self.Feedback)
for ops, name, _ in jobs:
jex.QueueJob(name, *ops) # pylint: disable=W0142
try:
results = jex.GetResults()
except Exception, err: # pylint: disable=W0703
Log("Jobs failed: %s", err)
raise BurninFailure()
fail = False
val = []
for (_, name, post_process), (success, result) in zip(jobs, results):
if success:
if post_process:
try:
post_process()
except Exception, err: # pylint: disable=W0703
Log("Post process call for job %s failed: %s", name, err)
fail = True
val.append(result)
else:
fail = True
if fail:
raise BurninFailure()
return val
def ParseOptions(self):
"""Parses the command line options.
In case of command line errors, it will show the usage and exit the
program.
"""
parser = optparse.OptionParser(usage="\n%s" % USAGE,
version=("%%prog (ganeti) %s" %
constants.RELEASE_VERSION),
option_list=OPTIONS)
options, args = parser.parse_args()
if len(args) < 1 or options.os is None:
Usage()
if options.mem_size:
options.maxmem_size = options.mem_size
options.minmem_size = options.mem_size
elif options.minmem_size > options.maxmem_size:
Err("Maximum memory lower than minimum memory")
if options.disk_template not in _SUPPORTED_DISK_TEMPLATES:
Err("Unknown or unsupported disk template '%s'" % options.disk_template)
if options.disk_template == constants.DT_DISKLESS:
disk_size = disk_growth = []
options.do_addremove_disks = False
else:
disk_size = [utils.ParseUnit(v) for v in options.disk_size.split(",")]
disk_growth = [utils.ParseUnit(v)
for v in options.disk_growth.split(",")]
if len(disk_growth) != len(disk_size):
Err("Wrong disk sizes/growth combination")
if ((disk_size and options.disk_template == constants.DT_DISKLESS) or
(not disk_size and options.disk_template != constants.DT_DISKLESS)):
Err("Wrong disk count/disk template combination")
self.disk_size = disk_size
self.disk_growth = disk_growth
self.disk_count = len(disk_size)
if options.nodes and options.iallocator:
Err("Give either the nodes option or the iallocator option, not both")
if options.http_check and not options.name_check:
Err("Can't enable HTTP checks without name checks")
self.opts = options
self.instances = args
self.bep = {
constants.BE_MINMEM: options.minmem_size,
constants.BE_MAXMEM: options.maxmem_size,
constants.BE_VCPUS: options.vcpu_count,
}
self.hypervisor = None
self.hvp = {}
if options.hypervisor:
self.hypervisor, self.hvp = options.hypervisor
if options.reboot_types is None:
options.reboot_types = constants.REBOOT_TYPES
else:
options.reboot_types = options.reboot_types.split(",")
rt_diff = set(options.reboot_types).difference(constants.REBOOT_TYPES)
if rt_diff:
Err("Invalid reboot types specified: %s" % utils.CommaJoin(rt_diff))
socket.setdefaulttimeout(options.net_timeout)
def GetState(self):
"""Read the cluster state from the master daemon."""
if self.opts.nodes:
names = self.opts.nodes.split(",")
else:
names = []
try:
op = opcodes.OpNodeQuery(output_fields=["name", "offline", "drained"],
names=names, use_locking=True)
result = self.ExecOp(True, op)
except errors.GenericError, err:
err_code, msg = cli.FormatError(err)
Err(msg, exit_code=err_code)
self.nodes = [data[0] for data in result if not (data[1] or data[2])]
op_diagnose = opcodes.OpOsDiagnose(output_fields=["name",
"variants",
"hidden"],
names=[])
result = self.ExecOp(True, op_diagnose)
if not result:
Err("Can't get the OS list")
found = False
for (name, variants, _) in result:
if self.opts.os in cli.CalculateOSNames(name, variants):
found = True
break
if not found:
Err("OS '%s' not found" % self.opts.os)
cluster_info = self.cl.QueryClusterInfo()
self.cluster_info = cluster_info
if not self.cluster_info:
Err("Can't get cluster info")
default_nic_params = self.cluster_info["nicparams"][constants.PP_DEFAULT]
self.cluster_default_nicparams = default_nic_params
if self.hypervisor is None:
self.hypervisor = self.cluster_info["default_hypervisor"]
self.hv_can_migrate = \
hypervisor.GetHypervisorClass(self.hypervisor).CAN_MIGRATE
@_DoCheckInstances
@_DoBatch(False)
def BurnCreateInstances(self):
"""Create the given instances.
"""
self.to_rem = []
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
self.instances)
Log("Creating instances")
for pnode, snode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
pnode = snode = None
msg = "with iallocator %s" % self.opts.iallocator
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
msg = "on %s" % pnode
else:
msg = "on %s, %s" % (pnode, snode)
Log(msg, indent=2)
op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_CREATE,
os_type=self.opts.os,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_driver="loop",
file_storage_dir=None,
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
hypervisor=self.hypervisor,
osparams=self.opts.osparams,
)
remove_instance = lambda name: lambda: self.to_rem.append(name)
self.ExecOrQueue(instance, [op], post_process=remove_instance(instance))
@_DoBatch(False)
def BurnModifyRuntimeMemory(self):
"""Alter the runtime memory."""
Log("Setting instance runtime memory")
for instance in self.instances:
Log("instance %s", instance, indent=1)
tgt_mem = self.bep[constants.BE_MINMEM]
op = opcodes.OpInstanceSetParams(instance_name=instance,
runtime_mem=tgt_mem)
Log("Set memory to %s MB", tgt_mem, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnGrowDisks(self):
"""Grow both the os and the swap disks by the requested amount, if any."""
Log("Growing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
for idx, growth in enumerate(self.disk_growth):
if growth > 0:
op = opcodes.OpInstanceGrowDisk(instance_name=instance, disk=idx,
amount=growth, wait_for_sync=True)
Log("increase disk/%s by %s MB", idx, growth, indent=2)
self.ExecOrQueue(instance, [op])
@_DoBatch(True)
def BurnReplaceDisks1D8(self):
"""Replace disks on primary and secondary for drbd8."""
Log("Replacing disks on the same nodes")
early_release = self.opts.early_release
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for mode in constants.REPLACE_DISK_SEC, constants.REPLACE_DISK_PRI:
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
disks=list(range(self.disk_count)),
early_release=early_release)
Log("run %s", mode, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoBatch(True)
def BurnReplaceDisks2(self):
"""Replace secondary node."""
Log("Changing the secondary node")
mode = constants.REPLACE_DISK_CHG
mytor = izip(islice(cycle(self.nodes), 2, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
if self.opts.iallocator:
tnode = None
msg = "with iallocator %s" % self.opts.iallocator
else:
msg = tnode
op = opcodes.OpInstanceReplaceDisks(instance_name=instance,
mode=mode,
remote_node=tnode,
iallocator=self.opts.iallocator,
disks=[],
early_release=self.opts.early_release)
Log("run %s %s", mode, msg, indent=2)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnFailover(self):
"""Failover the instances."""
Log("Failing over instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceFailover(instance_name=instance,
ignore_consistency=False)
self.ExecOrQueue(instance, [op])
@_DoCheckInstances
@_DoBatch(False)
def BurnMove(self):
"""Move the instances."""
Log("Moving instances")
mytor = izip(islice(cycle(self.nodes), 1, None),
self.instances)
for tnode, instance in mytor:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceMove(instance_name=instance,
target_node=tnode)
self.ExecOrQueue(instance, [op])
@_DoBatch(False)
def BurnMigrate(self):
"""Migrate the instances."""
Log("Migrating instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=False)
op2 = opcodes.OpInstanceMigrate(instance_name=instance, mode=None,
cleanup=True)
Log("migration and migration cleanup", indent=2)
self.ExecOrQueue(instance, [op1, op2])
@_DoCheckInstances
@_DoBatch(False)
def BurnImportExport(self):
"""Export the instance, delete it, and import it back.
"""
Log("Exporting and re-importing instances")
mytor = izip(cycle(self.nodes),
islice(cycle(self.nodes), 1, None),
islice(cycle(self.nodes), 2, None),
self.instances)
for pnode, snode, enode, instance in mytor:
Log("instance %s", instance, indent=1)
# read the full name of the instance
nam_op = opcodes.OpInstanceQuery(output_fields=["name"],
names=[instance], use_locking=True)
full_name = self.ExecOp(False, nam_op)[0][0]
if self.opts.iallocator:
pnode = snode = None
import_log_msg = ("import from %s"
" with iallocator %s" %
(enode, self.opts.iallocator))
elif self.opts.disk_template not in constants.DTS_INT_MIRROR:
snode = None
import_log_msg = ("import from %s to %s" %
(enode, pnode))
else:
import_log_msg = ("import from %s to %s, %s" %
(enode, pnode, snode))
exp_op = opcodes.OpBackupExport(instance_name=instance,
target_node=enode,
mode=constants.EXPORT_MODE_LOCAL,
shutdown=True)
rem_op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
imp_dir = utils.PathJoin(pathutils.EXPORT_DIR, full_name)
imp_op = opcodes.OpInstanceCreate(instance_name=instance,
disks=[{"size": size}
for size in self.disk_size],
disk_template=self.opts.disk_template,
nics=self.opts.nics,
mode=constants.INSTANCE_IMPORT,
src_node=enode,
src_path=imp_dir,
pnode=pnode,
snode=snode,
start=True,
ip_check=self.opts.ip_check,
name_check=self.opts.name_check,
wait_for_sync=True,
file_storage_dir=None,
file_driver="loop",
iallocator=self.opts.iallocator,
beparams=self.bep,
hvparams=self.hvp,
osparams=self.opts.osparams,
)
erem_op = opcodes.OpBackupRemove(instance_name=instance)
Log("export to node %s", enode, indent=2)
Log("remove instance", indent=2)
Log(import_log_msg, indent=2)
Log("remove export", indent=2)
self.ExecOrQueue(instance, [exp_op, rem_op, imp_op, erem_op])
@staticmethod
def StopInstanceOp(instance):
"""Stop given instance."""
return opcodes.OpInstanceShutdown(instance_name=instance)
@staticmethod
def StartInstanceOp(instance):
"""Start given instance."""
return opcodes.OpInstanceStartup(instance_name=instance, force=False)
@staticmethod
def RenameInstanceOp(instance, instance_new):
"""Rename instance."""
return opcodes.OpInstanceRename(instance_name=instance,
new_name=instance_new)
@_DoCheckInstances
@_DoBatch(True)
def BurnStopStart(self):
"""Stop/start the instances."""
Log("Stopping and starting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2])
@_DoBatch(False)
def BurnRemove(self):
"""Remove the instances."""
Log("Removing instances")
for instance in self.to_rem:
Log("instance %s", instance, indent=1)
op = opcodes.OpInstanceRemove(instance_name=instance,
ignore_failures=True)
self.ExecOrQueue(instance, [op])
def BurnRename(self):
"""Rename the instances.
Note that this function will not execute in parallel, since we
only have one target for rename.
"""
Log("Renaming instances")
rename = self.opts.rename
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_stop1 = self.StopInstanceOp(instance)
op_stop2 = self.StopInstanceOp(rename)
op_rename1 = self.RenameInstanceOp(instance, rename)
op_rename2 = self.RenameInstanceOp(rename, instance)
op_start1 = self.StartInstanceOp(rename)
op_start2 = self.StartInstanceOp(instance)
self.ExecOp(False, op_stop1, op_rename1, op_start1)
self._CheckInstanceAlive(rename)
self.ExecOp(False, op_stop2, op_rename2, op_start2)
self._CheckInstanceAlive(instance)
@_DoCheckInstances
@_DoBatch(True)
def BurnReinstall(self):
"""Reinstall the instances."""
Log("Reinstalling instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = opcodes.OpInstanceReinstall(instance_name=instance)
Log("reinstall without passing the OS", indent=2)
op3 = opcodes.OpInstanceReinstall(instance_name=instance,
os_type=self.opts.os)
Log("reinstall specifying the OS", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op3, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnReboot(self):
"""Reboot the instances."""
Log("Rebooting instances")
for instance in self.instances:
Log("instance %s", instance, indent=1)
ops = []
for reboot_type in self.opts.reboot_types:
op = opcodes.OpInstanceReboot(instance_name=instance,
reboot_type=reboot_type,
ignore_secondaries=False)
Log("reboot with type '%s'", reboot_type, indent=2)
ops.append(op)
self.ExecOrQueue(instance, ops)
@_DoCheckInstances
@_DoBatch(True)
def BurnRenameSame(self):
"""Rename the instances to their own name."""
Log("Renaming the instances to their own name")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op1 = self.StopInstanceOp(instance)
op2 = self.RenameInstanceOp(instance, instance)
Log("rename to the same name", indent=2)
op4 = self.StartInstanceOp(instance)
self.ExecOrQueue(instance, [op1, op2, op4])
@_DoCheckInstances
@_DoBatch(True)
def BurnActivateDisks(self):
"""Activate and deactivate disks of the instances."""
Log("Activating/deactivating disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_start = self.StartInstanceOp(instance)
op_act = opcodes.OpInstanceActivateDisks(instance_name=instance)
op_deact = opcodes.OpInstanceDeactivateDisks(instance_name=instance)
op_stop = self.StopInstanceOp(instance)
Log("activate disks when online", indent=2)
Log("activate disks when offline", indent=2)
Log("deactivate disks (when offline)", indent=2)
self.ExecOrQueue(instance, [op_act, op_stop, op_act, op_deact, op_start])
@_DoCheckInstances
@_DoBatch(False)
def BurnAddRemoveDisks(self):
"""Add and remove an extra disk for the instances."""
Log("Adding and removing disks")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance,
disks=[(constants.DDM_ADD, {"size": self.disk_size[0]})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, disks=[(constants.DDM_REMOVE, {})])
op_stop = self.StopInstanceOp(instance)
op_start = self.StartInstanceOp(instance)
Log("adding a disk", indent=2)
Log("removing last disk", indent=2)
self.ExecOrQueue(instance, [op_add, op_stop, op_rem, op_start])
@_DoBatch(False)
def BurnAddRemoveNICs(self):
"""Add, change and remove an extra NIC for the instances."""
Log("Adding and removing NICs")
for instance in self.instances:
Log("instance %s", instance, indent=1)
op_add = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_ADD, {})])
op_chg = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_MODIFY,
-1, {"mac": constants.VALUE_GENERATE})])
op_rem = opcodes.OpInstanceSetParams(
instance_name=instance, nics=[(constants.DDM_REMOVE, {})])
Log("adding a NIC", indent=2)
Log("changing a NIC", indent=2)
Log("removing last NIC", indent=2)
self.ExecOrQueue(instance, [op_add, op_chg, op_rem])
def ConfdCallback(self, reply):
"""Callback for confd queries"""
if reply.type == confd_client.UPCALL_REPLY:
if reply.server_reply.status != constants.CONFD_REPL_STATUS_OK:
Err("Query %s gave non-ok status %s: %s" % (reply.orig_request,
reply.server_reply.status,
reply.server_reply))
if reply.orig_request.type == constants.CONFD_REQ_PING:
Log("Ping: OK", indent=1)
elif reply.orig_request.type == constants.CONFD_REQ_CLUSTER_MASTER:
if reply.server_reply.answer == self.cluster_info["master"]:
Log("Master: OK", indent=1)
else:
Err("Master: wrong: %s" % reply.server_reply.answer)
elif reply.orig_request.type == constants.CONFD_REQ_NODE_ROLE_BYNAME:
if reply.server_reply.answer == constants.CONFD_NODE_ROLE_MASTER:
Log("Node role for master: OK", indent=1)
else:
Err("Node role for master: wrong: %s" % reply.server_reply.answer)
def DoConfdRequestReply(self, req):
self.confd_counting_callback.RegisterQuery(req.rsalt)
self.confd_client.SendRequest(req, async=False)
while not self.confd_counting_callback.AllAnswered():
if not self.confd_client.ReceiveReply():
Err("Did not receive all expected confd replies")
break
def BurnConfd(self):
"""Run confd queries for our instances.
The following confd queries are tested:
- CONFD_REQ_PING: simple ping
- CONFD_REQ_CLUSTER_MASTER: cluster master
- CONFD_REQ_NODE_ROLE_BYNAME: node role, for the master
"""
Log("Checking confd results")
filter_callback = confd_client.ConfdFilterCallback(self.ConfdCallback)
counting_callback = confd_client.ConfdCountingCallback(filter_callback)
self.confd_counting_callback = counting_callback
self.confd_client = confd_client.GetConfdClient(counting_callback)
req = confd_client.ConfdClientRequest(type=constants.CONFD_REQ_PING)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_CLUSTER_MASTER)
self.DoConfdRequestReply(req)
req = confd_client.ConfdClientRequest(
type=constants.CONFD_REQ_NODE_ROLE_BYNAME,
query=self.cluster_info["master"])
self.DoConfdRequestReply(req)
def _CheckInstanceAlive(self, instance):
"""Check if an instance is alive by doing http checks.
This will try to retrieve the url on the instance /hostname.txt
and check that it contains the hostname of the instance. In case
we get ECONNREFUSED, we retry up to the net timeout seconds, for
any other error we abort.
"""
if not self.opts.http_check:
return
end_time = time.time() + self.opts.net_timeout
url = None
while time.time() < end_time and url is None:
try:
url = self.url_opener.open("http://%s/hostname.txt" % instance)
except IOError:
# here we can have connection refused, no route to host, etc.
time.sleep(1)
if url is None:
raise InstanceDown(instance, "Cannot contact instance")
hostname = url.read().strip()
url.close()
if hostname != instance:
raise InstanceDown(instance, ("Hostname mismatch, expected %s, got %s" %
(instance, hostname)))
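  # Note: the check assumes each burnin instance serves its own hostname at
  # http://<instance>/hostname.txt; connection errors are retried until
  # opts.net_timeout expires, and a hostname mismatch raises InstanceDown.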
def BurninCluster(self):
"""Test a cluster intensively.
This will create instances and then start/stop/failover them.
It is safe for existing instances but could impact performance.
"""
Log("Testing global parameters")
if (len(self.nodes) == 1 and
self.opts.disk_template not in _SINGLE_NODE_DISK_TEMPLATES):
Err("When one node is available/selected the disk template must"
" be one of %s" % utils.CommaJoin(_SINGLE_NODE_DISK_TEMPLATES))
if self.opts.do_confd_tests and not constants.ENABLE_CONFD:
Err("You selected confd tests but confd was disabled at configure time")
has_err = True
try:
self.BurnCreateInstances()
if self.bep[constants.BE_MINMEM] < self.bep[constants.BE_MAXMEM]:
self.BurnModifyRuntimeMemory()
if self.opts.do_replace1 and \
self.opts.disk_template in constants.DTS_INT_MIRROR:
self.BurnReplaceDisks1D8()
if (self.opts.do_replace2 and len(self.nodes) > 2 and
self.opts.disk_template in constants.DTS_INT_MIRROR):
self.BurnReplaceDisks2()
if (self.opts.disk_template in constants.DTS_GROWABLE and
compat.any(n > 0 for n in self.disk_growth)):
self.BurnGrowDisks()
if self.opts.do_failover and \
self.opts.disk_template in constants.DTS_MIRRORED:
self.BurnFailover()
if self.opts.do_migrate:
if self.opts.disk_template not in constants.DTS_MIRRORED:
Log("Skipping migration (disk template %s does not support it)",
self.opts.disk_template)
elif not self.hv_can_migrate:
Log("Skipping migration (hypervisor %s does not support it)",
self.hypervisor)
else:
self.BurnMigrate()
if (self.opts.do_move and len(self.nodes) > 1 and
self.opts.disk_template in [constants.DT_PLAIN, constants.DT_FILE]):
self.BurnMove()
if (self.opts.do_importexport and
self.opts.disk_template in _IMPEXP_DISK_TEMPLATES):
self.BurnImportExport()
if self.opts.do_reinstall:
self.BurnReinstall()
if self.opts.do_reboot:
self.BurnReboot()
if self.opts.do_renamesame:
self.BurnRenameSame()
if self.opts.do_addremove_disks:
self.BurnAddRemoveDisks()
default_nic_mode = self.cluster_default_nicparams[constants.NIC_MODE]
# Don't add/remove nics in routed mode, as we would need an ip to add
# them with
if self.opts.do_addremove_nics:
if default_nic_mode == constants.NIC_MODE_BRIDGED:
self.BurnAddRemoveNICs()
else:
Log("Skipping nic add/remove as the cluster is not in bridged mode")
if self.opts.do_activate_disks:
self.BurnActivateDisks()
if self.opts.rename:
self.BurnRename()
if self.opts.do_confd_tests:
self.BurnConfd()
if self.opts.do_startstop:
self.BurnStopStart()
has_err = False
finally:
if has_err:
Log("Error detected: opcode buffer follows:\n\n")
Log(self.GetFeedbackBuf())
Log("\n\n")
if not self.opts.keep_instances:
try:
self.BurnRemove()
except Exception, err: # pylint: disable=W0703
if has_err: # already detected errors, so errors in removal
# are quite expected
Log("Note: error detected during instance remove: %s", err)
else: # non-expected error
raise
return constants.EXIT_SUCCESS
def Main():
"""Main function.
"""
utils.SetupLogging(pathutils.LOG_BURNIN, sys.argv[0],
debug=False, stderr_logging=True)
return Burner().BurninCluster()
| vladimir-ipatov/ganeti | lib/tools/burnin.py | Python | gpl-2.0 | 42,651 | 0.00823 |
from django.views.generic.base import ContextMixin
class PaginationPages(ContextMixin):
"""
    Mixin that extends the template context with pages_list and url_without_page for pagination.
"""
def get_context_data(self, **kwargs):
"""
        Extend the context with pages_list and url_without_page for pagination.
"""
from urllib.parse import urlencode
context = super().get_context_data(**kwargs)
paginator = context['paginator']
pages = list(paginator.page_range)
current_page = context['page_obj'].number
count = paginator.num_pages
start = current_page - self.page_dif
if start < 1:
start = 1
finish = current_page + self.page_dif
pages_list = []
if start > 1:
pages_list.append(1)
if start > 2:
pages_list.append('...')
        pages_list.extend(pages[start - 1:finish])  # The list is zero-indexed
        # while page numbers start at 1, so slice from start - 1 to collect
        # the pages numbered start..finish.
if finish + 1 < count:
pages_list.append('...')
if finish < count:
pages_list.append(count)
context['pages_list'] = pages_list
get = dict(self.request.GET.copy())
if 'page' in get:
del get['page']
params = urlencode(get, doseq=True)
context['url_without_page'] = self.request.path + '?' + params + '&page='
return context
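# Minimal usage sketch (assumption: the mixin is combined with a paginated
# ListView, and the view defines a `page_dif` attribute, which
# get_context_data() reads but this class does not declare):
#
#   class PostListView(PaginationPages, ListView):
#       model = Post
#       paginate_by = 10
#       page_dif = 3  # page links shown on each side of the current page
#
# Templates can then iterate over `pages_list` and build links from
# `url_without_page` plus the page number.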
| rudikovrf/django_blog | generic/pagination.py | Python | mit | 1,556 | 0.003213 |
"""Tests for init functions."""
from datetime import timedelta
from zoneminder.zm import ZoneMinder
from homeassistant import config_entries
from homeassistant.components.zoneminder import const
from homeassistant.components.zoneminder.common import is_client_in_data
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import (
ATTR_ID,
ATTR_NAME,
CONF_HOST,
CONF_PASSWORD,
CONF_PATH,
CONF_SOURCE,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, patch
from tests.common import async_fire_time_changed
async def test_no_yaml_config(hass: HomeAssistant) -> None:
"""Test empty yaml config."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.return_value = True
zm_client.get_monitors.return_value = []
zoneminder_mock.return_value = zm_client
hass_config = {const.DOMAIN: []}
await async_setup_component(hass, const.DOMAIN, hass_config)
await hass.async_block_till_done()
assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
async def test_yaml_config_import(hass: HomeAssistant) -> None:
"""Test yaml config import."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.return_value = True
zm_client.get_monitors.return_value = []
zoneminder_mock.return_value = zm_client
hass_config = {const.DOMAIN: [{CONF_HOST: "host1"}]}
await async_setup_component(hass, const.DOMAIN, hass_config)
await hass.async_block_till_done()
assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
async def test_load_call_service_and_unload(hass: HomeAssistant) -> None:
"""Test config entry load/unload and calling of service."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.side_effect = [True, True, False, True]
zm_client.get_monitors.return_value = []
zm_client.is_available.return_value = True
zoneminder_mock.return_value = zm_client
await hass.config_entries.flow.async_init(
const.DOMAIN,
context={CONF_SOURCE: config_entries.SOURCE_USER},
data={
CONF_HOST: "host1",
CONF_USERNAME: "username1",
CONF_PASSWORD: "password1",
CONF_PATH: "path1",
const.CONF_PATH_ZMS: "path_zms1",
CONF_SSL: False,
CONF_VERIFY_SSL: True,
},
)
await hass.async_block_till_done()
config_entry = next(iter(hass.config_entries.async_entries(const.DOMAIN)), None)
assert config_entry
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
assert not is_client_in_data(hass, "host1")
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_LOADED
assert is_client_in_data(hass, "host1")
assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
await hass.services.async_call(
const.DOMAIN,
const.SERVICE_SET_RUN_STATE,
{ATTR_ID: "host1", ATTR_NAME: "away"},
)
await hass.async_block_till_done()
zm_client.set_active_state.assert_called_with("away")
await config_entry.async_unload(hass)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_NOT_LOADED
assert not is_client_in_data(hass, "host1")
assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
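# Note: these tests swap the real ZoneMinder client for a MagicMock specced
# against the actual class, so they run without a ZoneMinder server; in the
# last test the sequence of login side effects first forces the setup-retry
# state and then lets the entry load once time is advanced.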
| tchellomello/home-assistant | tests/components/zoneminder/test_init.py | Python | apache-2.0 | 4,539 | 0.001763 |
"""
The MIT License (MIT)
Copyright © 2015 RealDolos
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the “Software”), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
#pylint: disable=unused-argument
import logging
from time import time
from datetime import datetime, timedelta
from humanize import naturaldelta
from cachetools import LRUCache
from .._version import __version__, __fulltitle__
from ..utils import get_text, get_json
from .command import Command, PulseCommand
from .db import DBCommand
__all__ = [
"NiggersCommand",
"CheckModCommand",
"AboutCommand",
"SeenCommand",
"AsleepCommand",
]
LOGGER = logging.getLogger(__name__)
class NiggersCommand(Command):
handlers = "!niggers", "!obamas"
def __init__(self, *args, **kw):
self.blacks = kw.get("args").blacks
self.obamas = kw.get("args").obamas
super().__init__(*args, **kw)
def handle_niggers(self, cmd, remainder, msg):
if not self.allowed(msg):
return False
self.post("{}, the following black gentlemen cannot use this bot: {}",
msg.nick, ", ".join(self.blacks))
return True
def handle_obamas(self, cmd, remainder, msg):
if not self.allowed(msg):
return False
self.post("{}, the following half-black gentlemen can only use this bot "
"once every couple of minutes: {}",
msg.nick, ", ".join(self.obamas))
return True
class CheckModCommand(Command):
handlers = ":check"
def staff(self, user):
if user.lower() in ("mercwmouth",):
self.post(
"Yes, unfortunately the literally brown pajeet hitler "
"and pretend lawyer {} is still a marginally trusted user",
user)
elif user.lower() in ("thersanderia",):
self.post("Yes, {} is a Trusted Jizzcook with not one but two Michelin stars!", user)
elif user.lower() in ("kalyx", "mercwmouth", "davinci", "liquid"):
self.post(
"Yes, unfortunately the fag "
"{} is still a marginally trusted user",
user)
else:
self.post("Yes, {} is still a marginally trusted user", user)
def admin(self, user):
if user.lower() in ("mercwmouth",):
self.post(
"Yes, unfortunately the literally brown pajeet hitler "
"and pretend lawyer {} is still a marginally mod user",
user)
elif user.lower() in ("kalyx", "mercwmouth", "davinci", "liquid"):
self.post("Yes, unfortunately the fag {} is still a mod", user)
elif user.lower() == "ptc":
self.post("Sweet jewprince is well and alive, unlike Y!erizon")
else:
self.post("Yes, {} is still a mod", user)
def user(self, user):
if user.lower() in ("mercwmouth",):
self.post(
"The literally brown pajeet hitler and pretend lawyer "
"{} is only a designated user",
user)
if user.lower() == "ptc":
self.post("Rest in pieces, sweet jewprince")
elif user.lower() == "liquid":
self.post("pls, Liquid will never be a mod")
else:
self.post("{} is not trusted, at all!", user)
def handle_check(self, cmd, remainder, msg):
remainder = remainder.strip()
user = remainder if remainder and " " not in remainder else "MercWMouth"
LOGGER.debug("Getting user %s", user)
try:
text, exp = get_text("https://volafile.org/user/{}".format(user))
if time() - exp > 120:
get_text.cache_clear()
get_json.cache_clear()
text, exp = get_text("https://volafile.org/user/{}".format(user))
if "Error 404" in text:
LOGGER.info("Not a user %s", user)
return False
i = self.room.conn.make_api_call("getUserInfo", params=dict(name=user))
if i.get("staff"):
self.staff(user)
elif i.get("admin"):
self.admin(user)
else:
self.user(user)
return True
except Exception:
LOGGER.exception("huh?")
return False
class AboutCommand(Command):
handlers = "!about", ".about", "!parrot"
def handle_cmd(self, cmd, remainder, msg):
if not self.allowed(msg):
return False
self.post(
"{}, I am {}, watch me fly:\n{}",
remainder or msg.nick,
__fulltitle__,
"https://github.com/RealDolos/volaparrot/")
return True
class SeenCommand(DBCommand, Command, PulseCommand):
interval = 5 * 60
seen = LRUCache(maxsize=50)
start = time()
usermap = {
"auxo's waifu": "triggu",
"doiosodolos": "Daniel",
"cirno": "Daniel",
"haskell": "Daniel",
"ekecheiria": "Daniel",
"baronbone": "Daniel",
"cyberia": "Daniel",
"countcoccyx": "Daniel",
"doc": "Dongmaster",
"jewmobile": "TheJIDF",
"jew": "TheJIDF",
"thejew": "TheJIDF",
"mrshlomo": "TheJIDF",
"pedo": "Counselor",
"pede": "Counselor",
"briseis": "Counselor",
"notnot": "Counselor",
"counselorpedro": "Counselor",
"marky": "SuperMarky",
"mcgill": "SuperMarky",
"voladolos": "SuperMarky",
"affarisdolos": "RealDolos",
"gnuwin7dolos": "RealDolos",
"cuck": "RealDolos",
"merc": "MercWMouth",
"cunt": "MercWMouth",
"kak": "MercWMouth",
"dolosodolos": "MODChatBotGladio",
"fakedolos": "kreg",
"laindolos": "kreg",
"fakekreg": "kreg",
"DaVinci": "Ian",
"CuckDolos": "Ian",
"DolosCuck": "Ian",
"apha": "Polish plebbit pedo",
"wombatfucker": "NEPTVola",
}
def mapname(self, name):
if name.startswith("Xsa"):
return "Xsa"
return self.usermap.get(name.lower(), name)
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
self.conn.execute("CREATE TABLE IF NOT EXISTS seen ("
"user TEXT PRIMARY KEY, "
"time INT"
")")
try:
cur = self.conn.cursor()
cur.execute("SELECT time FROM seen ORDER BY time ASC LIMIT 1")
self.start = int(cur.fetchone()[0]) / 1000
except Exception:
LOGGER.exception("Failed to get min seen")
def handles(self, cmd):
return True
def onpulse(self, pulse):
try:
LOGGER.debug("Dumping seen to db")
cur = self.conn.cursor()
cur.executemany(
"INSERT OR REPLACE INTO seen VALUES(?, ?)",
list((u, int(v * 1000)) for u, v in self.seen.items())
)
except Exception:
LOGGER.exception("Failed to update seen")
def handle_cmd(self, cmd, remainder, msg):
nick = self.mapname(msg.nick).casefold()
self.seen[nick] = time()
if msg.admin or msg.staff:
self.seen["@{}".format(nick)] = time()
if msg.logged_in:
self.seen["+{}".format(nick)] = time()
if cmd != "!seen":
return False
if not self.allowed(msg) or not remainder:
return False
remainder = remainder.strip()
remainder = self.mapname(remainder)
crem = remainder.casefold()
seen = self.seen.get(crem)
if not seen:
cur = self.conn.cursor()
cur.execute("SELECT time FROM seen WHERE user = ?", (crem,))
seen = cur.fetchone()
seen = seen and int(seen[0]) / 1000
if remainder.lower() == "lain":
self.post(
"Lain was never here, will never come here, "
"and does not care about volafile at all. Please donate!")
elif not seen:
self.post(
"I have not seen {} since {}",
remainder, naturaldelta(time() - self.start))
else:
self.post(
"{} was last seen {} ago",
remainder, naturaldelta(time() - seen))
return True
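# Design note: SeenCommand keeps recent sightings in an in-memory LRUCache and
# only flushes them to the backing `seen` table every `interval` seconds via
# onpulse(), so frequent lookups stay cheap while older entries remain
# available from the database.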
class AsleepCommand(Command):
last = None
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
def handles(self, cmd):
return True
def handle_cmd(self, cmd, remainder, msg):
if msg.admin and msg.logged_in:
AsleepCommand.last = datetime.now(), msg.nick
if cmd != "!asleep":
return False
if not AsleepCommand.last:
self.post("Mods are asleep")
elif AsleepCommand.last[0] + timedelta(minutes=20) <= datetime.now():
self.post("Mods have been asleep since {}", naturaldelta(AsleepCommand.last[0]))
else:
self.post(
"{} was awake and trolling {} ago",
AsleepCommand.last[1],
naturaldelta(AsleepCommand.last[0]))
return True
| RealDolos/volaparrot | volaparrot/commands/info.py | Python | mit | 10,159 | 0.001084 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-15 19:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('booths', '0002_auto_20171116_0045'),
]
operations = [
migrations.AlterField(
model_name='booth',
name='rental_fee',
field=models.DecimalField(decimal_places=2, max_digits=20, verbose_name='Rental Fee'),
),
]
| we-inc/mms-snow-white-and-the-seven-pandas | webserver/apps/booths/migrations/0003_auto_20171116_0220.py | Python | mit | 501 | 0.001996 |
import rdflib
from rdflib.term import URIRef, Variable
from PyOpenWorm.dataObject import DataObject, InverseProperty
from PyOpenWorm.context import Context
from PyOpenWorm.context_store import ContextStore
from .DataTestTemplate import _DataTest
try:
from unittest.mock import MagicMock, Mock
except ImportError:
from mock import MagicMock, Mock
class ContextTest(_DataTest):
def test_inverse_property_context(self):
class A(DataObject):
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.a = A.ObjectProperty(value_type=B)
class B(DataObject):
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.b = B.ObjectProperty(value_type=A)
InverseProperty(B, 'b', A, 'a')
ctx1 = Context(ident='http://example.org/context_1')
ctx2 = Context(ident='http://example.org/context_2')
a = ctx1(A)(ident='a')
b = ctx2(B)(ident='b')
a.a(b)
expected = (URIRef('b'), URIRef('http://openworm.org/entities/B/b'), URIRef('a'))
self.assertIn(expected, list(ctx1.contents_triples()))
def test_defined(self):
class A(DataObject):
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.a = A.ObjectProperty(value_type=B)
def defined_augment(self):
return self.a.has_defined_value()
def identifier_augment(self):
return self.make_identifier(self.a.onedef().identifier.n3())
class B(DataObject):
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.b = B.ObjectProperty(value_type=A)
InverseProperty(B, 'b', A, 'a')
ctx1 = Context(ident='http://example.org/context_1')
ctx2 = Context(ident='http://example.org/context_2')
a = ctx1(A)()
b = ctx2(B)(ident='b')
a.a(b)
self.assertTrue(a.defined)
def test_save_context_no_graph(self):
ctx = Context()
del ctx.conf['rdf.graph']
with self.assertRaisesRegexp(Exception, r'graph'):
ctx.save_context()
def test_context_store(self):
class A(DataObject):
pass
ctx = Context(ident='http://example.com/context_1')
ctx(A)(ident='anA')
self.assertIn(URIRef('anA'),
tuple(x.identifier for x in ctx.query(A)().load()))
def test_decontextualize(self):
class A(DataObject):
pass
ctx = Context(ident='http://example.com/context_1')
ctxda = ctx(A)(ident='anA')
self.assertIsNone(ctxda.decontextualize().context)
def test_init_imports(self):
ctx = Context(ident='http://example.com/context_1')
self.assertEqual(len(list(ctx.imports)), 0)
def test_zero_imports(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 0)
def test_save_import(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
new_ctx = Context(ident='http://example.com/context_1')
ctx.add_import(new_ctx)
ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 1)
def test_add_import(self):
ctx0 = Context(ident='http://example.com/context_0')
ctx = Context(ident='http://example.com/context_1')
ctx2 = Context(ident='http://example.com/context_2')
ctx2_1 = Context(ident='http://example.com/context_2_1')
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident='http://example.com/context_3')
ctx3.add_import(ctx)
final_ctx = Context(ident='http://example.com/context_1', imported=(ctx3,))
final_ctx.save_imports(ctx0)
self.assertEqual(len(ctx0), 4)
def test_init_len(self):
ctx = Context(ident='http://example.com/context_1')
self.assertEqual(len(ctx), 0)
def test_len(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
self.assertEqual(len(ctx), 5)
def test_add_remove_statement(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
stmt_to_remove = create_mock_statement(ident_uri, 42)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.add_statement(stmt_to_remove)
ctx.remove_statement(stmt_to_remove)
self.assertEqual(len(ctx), 5)
def test_add_statement_with_different_context(self):
ctx = Context(ident='http://example.com/context_1')
stmt1 = create_mock_statement('http://example.com/context_2', 1)
with self.assertRaises(ValueError):
ctx.add_statement(stmt1)
def test_contents_triples(self):
res_wanted = []
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
stmt = create_mock_statement(ident_uri, i)
ctx.add_statement(stmt)
res_wanted.append(stmt.to_triple())
for triples in ctx.contents_triples():
self.assertTrue(triples in res_wanted)
def test_clear(self):
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.clear()
self.assertEqual(len(ctx), 0)
def test_save_context(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
for i in range(5):
ctx.add_statement(create_mock_statement(ident_uri, i))
ctx.save_context(graph)
self.assertEqual(len(graph), 5)
def test_save_context_with_inline_imports(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri2 = 'http://example.com/context_2'
ident_uri2_1 = 'http://example.com/context_2_1'
ident_uri3 = 'http://example.com/context_3'
ident_uri4 = 'http://example.com/context_4'
ctx = Context(ident=ident_uri)
ctx2 = Context(ident=ident_uri2)
ctx2_1 = Context(ident=ident_uri2_1)
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident=ident_uri3)
ctx3.add_import(ctx)
last_ctx = Context(ident=ident_uri4)
last_ctx.add_import(ctx3)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1))
ctx3.add_statement(create_mock_statement(ident_uri3, 3))
last_ctx.add_statement(create_mock_statement(ident_uri4, 4))
last_ctx.save_context(graph, True)
self.assertEqual(len(graph), 5)
def test_triples_saved(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri2 = 'http://example.com/context_2'
ident_uri2_1 = 'http://example.com/context_2_1'
ident_uri3 = 'http://example.com/context_3'
ident_uri4 = 'http://example.com/context_4'
ctx = Context(ident=ident_uri)
ctx2 = Context(ident=ident_uri2)
ctx2_1 = Context(ident=ident_uri2_1)
ctx.add_import(ctx2)
ctx.add_import(ctx2_1)
ctx3 = Context(ident=ident_uri3)
ctx3.add_import(ctx)
last_ctx = Context(ident=ident_uri4)
last_ctx.add_import(ctx3)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1))
ctx3.add_statement(create_mock_statement(ident_uri3, 3))
last_ctx.add_statement(create_mock_statement(ident_uri4, 4))
last_ctx.save_context(graph, True)
self.assertEqual(last_ctx.triples_saved, 5)
def test_triples_saved_noundef_triples_counted(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ctx = Context(ident=ident_uri)
statement = MagicMock()
statement.context.identifier = rdflib.term.URIRef(ident_uri)
statement.to_triple.return_value = (Variable('var'), 1, 2)
ctx.add_statement(statement)
ctx.save_context(graph)
self.assertEqual(ctx.triples_saved, 0)
def test_triples_saved_multi(self):
graph = set()
ident_uri = 'http://example.com/context_1'
ident_uri1 = 'http://example.com/context_11'
ident_uri2 = 'http://example.com/context_12'
ctx = Context(ident=ident_uri)
ctx1 = Context(ident=ident_uri1)
ctx2 = Context(ident=ident_uri2)
ctx2.add_import(ctx)
ctx1.add_import(ctx2)
ctx1.add_import(ctx)
ctx.add_statement(create_mock_statement(ident_uri, 1))
ctx1.add_statement(create_mock_statement(ident_uri1, 3))
ctx2.add_statement(create_mock_statement(ident_uri2, 2))
ctx1.save_context(graph, inline_imports=True)
self.assertEqual(ctx1.triples_saved, 3)
def test_context_getter(self):
ctx = Context(ident='http://example.com/context_1')
self.assertIsNone(ctx.context)
def test_context_setter(self):
ctx = Context(ident='http://example.com/context_1')
ctx.context = 42
self.assertEqual(ctx.context, 42)
class ContextStoreTest(_DataTest):
def test_query(self):
rdf_type = 'http://example.org/A'
ctxid = URIRef('http://example.com/context_1')
ctx = Mock()
graph = Mock()
graph.store.triples.side_effect = ([], [((URIRef('anA0'), rdflib.RDF.type, rdf_type), (ctxid,))],)
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = [(URIRef('anA'), rdflib.RDF.type, rdf_type)]
ctx.identifier = ctxid
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
self.assertEqual(set([URIRef('anA'), URIRef('anA0')]),
set(x[0][0] for x in store.triples((None, rdflib.RDF.type, rdf_type))))
def test_contexts_staged_ignores_stored(self):
ctxid0 = URIRef('http://example.com/context_0')
ctxid1 = URIRef('http://example.com/context_1')
ctx = Mock()
graph = Mock()
graph.store.triples.side_effect = [[((None, None, ctxid0), ())], []]
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = ()
ctx.identifier = ctxid1
ctx.imports = []
store = ContextStore(ctx)
self.assertNotIn(ctxid0, set(store.contexts()))
def test_contexts_combined(self):
ctxid0 = URIRef('http://example.com/context_0')
ctxid1 = URIRef('http://example.com/context_1')
ctx = Mock()
graph = Mock()
graph.store.triples.side_effect = [[((None, None, ctxid0), ())], []]
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = ()
ctx.identifier = ctxid1
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
self.assertEqual(set([ctxid0, ctxid1]),
set(store.contexts()))
def test_len_fail(self):
ctx = Mock()
graph = Mock()
ctx.conf = {'rdf.graph': graph}
ctx.contents_triples.return_value = ()
ctx.imports = []
store = ContextStore(ctx, include_stored=True)
with self.assertRaises(NotImplementedError):
len(store)
def create_mock_statement(ident_uri, stmt_id):
statement = MagicMock()
statement.context.identifier = rdflib.term.URIRef(ident_uri)
statement.to_triple.return_value = (True, stmt_id, -stmt_id)
return statement
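# The mocked statements above carry a fixed context URI and a
# (True, stmt_id, -stmt_id) triple, which is all the save/len assertions need;
# the Variable-subject case checks that such unbound triples are not counted
# in triples_saved.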
| gsarma/PyOpenWorm | tests/ContextTest.py | Python | mit | 12,077 | 0.000414 |
"""Helpers that help with state related things."""
import asyncio
import datetime as dt
import json
import logging
from collections import defaultdict
from types import TracebackType
from typing import ( # noqa: F401 pylint: disable=unused-import
Awaitable, Dict, Iterable, List, Optional, Tuple, Type, Union)
from homeassistant.loader import bind_hass
import homeassistant.util.dt as dt_util
from homeassistant.components.notify import (
ATTR_MESSAGE, SERVICE_NOTIFY)
from homeassistant.components.sun import (
STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON)
from homeassistant.components.mysensors.switch import (
ATTR_IR_CODE, SERVICE_SEND_IR_CODE)
from homeassistant.components.cover import (
ATTR_POSITION, ATTR_TILT_POSITION)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER,
SERVICE_LOCK, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK,
SERVICE_OPEN_COVER,
SERVICE_CLOSE_COVER, SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION, STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED,
STATE_CLOSED, STATE_HOME, STATE_LOCKED, STATE_NOT_HOME, STATE_OFF,
STATE_ON, STATE_OPEN, STATE_UNKNOWN,
STATE_UNLOCKED, SERVICE_SELECT_OPTION)
from homeassistant.core import (
Context, State, DOMAIN as HASS_DOMAIN)
from homeassistant.util.async_ import run_coroutine_threadsafe
from .typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
GROUP_DOMAIN = 'group'
# Update this dict of lists when new services are added to HA.
# Each item is a service with a list of required attributes.
SERVICE_ATTRIBUTES = {
SERVICE_NOTIFY: [ATTR_MESSAGE],
SERVICE_SEND_IR_CODE: [ATTR_IR_CODE],
SERVICE_SELECT_OPTION: [ATTR_OPTION],
SERVICE_SET_COVER_POSITION: [ATTR_POSITION],
SERVICE_SET_COVER_TILT_POSITION: [ATTR_TILT_POSITION]
}
# Update this dict when new services are added to HA.
# Each item is a service with a corresponding state.
SERVICE_TO_STATE = {
SERVICE_TURN_ON: STATE_ON,
SERVICE_TURN_OFF: STATE_OFF,
SERVICE_ALARM_ARM_AWAY: STATE_ALARM_ARMED_AWAY,
SERVICE_ALARM_ARM_HOME: STATE_ALARM_ARMED_HOME,
SERVICE_ALARM_DISARM: STATE_ALARM_DISARMED,
SERVICE_ALARM_TRIGGER: STATE_ALARM_TRIGGERED,
SERVICE_LOCK: STATE_LOCKED,
SERVICE_UNLOCK: STATE_UNLOCKED,
SERVICE_OPEN_COVER: STATE_OPEN,
SERVICE_CLOSE_COVER: STATE_CLOSED
}
class AsyncTrackStates:
"""
Record the time when the with-block is entered.
Add all states that have changed since the start time to the return list
when with-block is exited.
Must be run within the event loop.
"""
def __init__(self, hass: HomeAssistantType) -> None:
"""Initialize a TrackStates block."""
self.hass = hass
self.states = [] # type: List[State]
# pylint: disable=attribute-defined-outside-init
def __enter__(self) -> List[State]:
"""Record time from which to track changes."""
self.now = dt_util.utcnow()
return self.states
def __exit__(self, exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType]) -> None:
"""Add changes states to changes list."""
self.states.extend(get_changed_since(self.hass.states.async_all(),
self.now))
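# Usage sketch (like the class itself, this must run inside the event loop;
# the service call shown is illustrative):
#
#   with AsyncTrackStates(hass) as changed_states:
#       await hass.services.async_call('light', 'turn_on', data, blocking=True)
#   # changed_states now holds every State updated while the block was active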
def get_changed_since(states: Iterable[State],
utc_point_in_time: dt.datetime) -> List[State]:
"""Return list of states that have been changed since utc_point_in_time."""
return [state for state in states
if state.last_updated >= utc_point_in_time]
@bind_hass
def reproduce_state(hass: HomeAssistantType,
states: Union[State, Iterable[State]],
blocking: bool = False) -> None:
"""Reproduce given state."""
return run_coroutine_threadsafe( # type: ignore
async_reproduce_state(hass, states, blocking), hass.loop).result()
@bind_hass
async def async_reproduce_state(
hass: HomeAssistantType,
states: Union[State, Iterable[State]],
blocking: bool = False,
context: Optional[Context] = None) -> None:
"""Reproduce a list of states on multiple domains."""
if isinstance(states, State):
states = [states]
to_call = defaultdict(list) # type: Dict[str, List[State]]
for state in states:
to_call[state.domain].append(state)
async def worker(domain: str, data: List[State]) -> None:
component = getattr(hass.components, domain)
if hasattr(component, 'async_reproduce_states'):
await component.async_reproduce_states(
data,
context=context)
else:
await async_reproduce_state_legacy(
hass,
domain,
data,
blocking=blocking,
context=context)
if to_call:
# run all domains in parallel
await asyncio.gather(*[
worker(domain, data)
for domain, data in to_call.items()
])
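# Illustrative call (from a coroutine; the State objects typically come from a
# previously captured snapshot):
#
#   await async_reproduce_state(
#       hass, [State('light.kitchen', 'on')], blocking=True)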
@bind_hass
async def async_reproduce_state_legacy(
hass: HomeAssistantType,
domain: str,
states: Iterable[State],
blocking: bool = False,
context: Optional[Context] = None) -> None:
"""Reproduce given state."""
to_call = defaultdict(list) # type: Dict[Tuple[str, str], List[str]]
if domain == GROUP_DOMAIN:
service_domain = HASS_DOMAIN
else:
service_domain = domain
for state in states:
if hass.states.get(state.entity_id) is None:
_LOGGER.warning("reproduce_state: Unable to find entity %s",
state.entity_id)
continue
domain_services = hass.services.async_services().get(service_domain)
if not domain_services:
_LOGGER.warning(
"reproduce_state: Unable to reproduce state %s (1)", state)
continue
service = None
for _service in domain_services.keys():
if (_service in SERVICE_ATTRIBUTES and
all(attr in state.attributes
for attr in SERVICE_ATTRIBUTES[_service]) or
_service in SERVICE_TO_STATE and
SERVICE_TO_STATE[_service] == state.state):
service = _service
if (_service in SERVICE_TO_STATE and
SERVICE_TO_STATE[_service] == state.state):
break
if not service:
_LOGGER.warning(
"reproduce_state: Unable to reproduce state %s (2)", state)
continue
# We group service calls for entities by service call
# json used to create a hashable version of dict with maybe lists in it
key = (service,
json.dumps(dict(state.attributes), sort_keys=True))
to_call[key].append(state.entity_id)
domain_tasks = [] # type: List[Awaitable[Optional[bool]]]
for (service, service_data), entity_ids in to_call.items():
data = json.loads(service_data)
data[ATTR_ENTITY_ID] = entity_ids
domain_tasks.append(
hass.services.async_call(service_domain, service, data, blocking,
context)
)
if domain_tasks:
await asyncio.wait(domain_tasks, loop=hass.loop)
def state_as_number(state: State) -> float:
"""
Try to coerce our state to a number.
Raises ValueError if this is not possible.
"""
from homeassistant.components.climate import (
STATE_HEAT, STATE_COOL, STATE_IDLE)
if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON,
STATE_OPEN, STATE_HOME, STATE_HEAT, STATE_COOL):
return 1
if state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN,
STATE_BELOW_HORIZON, STATE_CLOSED, STATE_NOT_HOME,
STATE_IDLE):
return 0
return float(state.state)
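# Examples: state_as_number(State('light.kitchen', 'on')) returns 1,
# state_as_number(State('sensor.temp', '21.5')) returns 21.5, and a value such
# as 'unavailable' raises ValueError.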
| PetePriority/home-assistant | homeassistant/helpers/state.py | Python | apache-2.0 | 8,174 | 0 |
""" Common Authentication Handlers used across projects. """
from __future__ import absolute_import
import logging
import django.utils.timezone
from oauth2_provider import models as dot_models
from provider.oauth2 import models as dop_models
from rest_framework.exceptions import AuthenticationFailed
from rest_framework_oauth.authentication import OAuth2Authentication
OAUTH2_TOKEN_ERROR = u'token_error'
OAUTH2_TOKEN_ERROR_EXPIRED = u'token_expired'
OAUTH2_TOKEN_ERROR_MALFORMED = u'token_malformed'
OAUTH2_TOKEN_ERROR_NONEXISTENT = u'token_nonexistent'
OAUTH2_TOKEN_ERROR_NOT_PROVIDED = u'token_not_provided'
log = logging.getLogger(__name__)
class OAuth2AuthenticationAllowInactiveUser(OAuth2Authentication):
"""
This is a temporary workaround while the is_active field on the user is coupled
with whether or not the user has verified ownership of their claimed email address.
Once is_active is decoupled from verified_email, we will no longer need this
class override.
But until then, this authentication class ensures that the user is logged in,
but does not require that their account "is_active".
This class can be used for an OAuth2-accessible endpoint that allows users to access
that endpoint without having their email verified. For example, this is used
for mobile endpoints.
"""
def authenticate(self, *args, **kwargs):
"""
Returns two-tuple of (user, token) if access token authentication
succeeds, raises an AuthenticationFailed (HTTP 401) if authentication
fails or None if the user did not try to authenticate using an access
token.
"""
try:
return super(OAuth2AuthenticationAllowInactiveUser, self).authenticate(*args, **kwargs)
except AuthenticationFailed as exc:
if isinstance(exc.detail, dict):
developer_message = exc.detail['developer_message']
error_code = exc.detail['error_code']
else:
developer_message = exc.detail
if 'No credentials provided' in developer_message:
error_code = OAUTH2_TOKEN_ERROR_NOT_PROVIDED
elif 'Token string should not contain spaces' in developer_message:
error_code = OAUTH2_TOKEN_ERROR_MALFORMED
else:
error_code = OAUTH2_TOKEN_ERROR
raise AuthenticationFailed({
u'error_code': error_code,
u'developer_message': developer_message
})
def authenticate_credentials(self, request, access_token):
"""
Authenticate the request, given the access token.
Overrides base class implementation to discard failure if user is
inactive.
"""
token = self.get_access_token(access_token)
if not token:
raise AuthenticationFailed({
u'error_code': OAUTH2_TOKEN_ERROR_NONEXISTENT,
u'developer_message': u'The provided access token does not match any valid tokens.'
})
elif token.expires < django.utils.timezone.now():
raise AuthenticationFailed({
u'error_code': OAUTH2_TOKEN_ERROR_EXPIRED,
u'developer_message': u'The provided access token has expired and is no longer valid.',
})
else:
return token.user, token
def get_access_token(self, access_token):
"""
Return a valid access token that exists in one of our OAuth2 libraries,
or None if no matching token is found.
"""
return self._get_dot_token(access_token) or self._get_dop_token(access_token)
def _get_dop_token(self, access_token):
"""
Return a valid access token stored by django-oauth2-provider (DOP), or
None if no matching token is found.
"""
token_query = dop_models.AccessToken.objects.select_related('user')
return token_query.filter(token=access_token).first()
def _get_dot_token(self, access_token):
"""
Return a valid access token stored by django-oauth-toolkit (DOT), or
None if no matching token is found.
"""
token_query = dot_models.AccessToken.objects.select_related('user')
return token_query.filter(token=access_token).first()
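# Usage sketch (hypothetical DRF view; names are illustrative):
#
#   class MobileCourseListView(APIView):
#       authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
#       permission_classes = (IsAuthenticated,)
#
# i.e. drop this class in wherever OAuth2Authentication would normally be used
# but unverified (inactive) accounts must still be allowed through.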
| ESOedX/edx-platform | openedx/core/lib/api/authentication.py | Python | agpl-3.0 | 4,363 | 0.002521 |
# -*- coding: utf8 -*-
import sys
import os
import pybossa_github_builder as plugin
from mock import patch
# Use the PyBossa test suite
sys.path.append(os.path.abspath("./pybossa/test"))
from default import with_context
def setUpPackage():
"""Setup the plugin."""
from default import flask_app
with flask_app.app_context():
settings = os.path.abspath('./settings_test.py')
flask_app.config.from_pyfile(settings)
plugin_dir = os.path.dirname(plugin.__file__)
plugin.PyBossaGitHubBuilder(plugin_dir).setup()
| alexandermendes/pybossa-github-builder | tests/__init__.py | Python | bsd-3-clause | 554 | 0.001805 |
# coding=utf-8
from psycopg2.extras import NamedTupleCursor, Json
from tornado.web import Application, HTTPError
from tornado import gen
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
from tornado.options import parse_command_line
import momoko
import os
from bank import SelectQuestion, get_level_one_item
from base import BaseHandler, SessionBaseHandler
from settings import MAX_ANSWER_COUNT, DSN, COOKIE_SECRET
from utils import Flow, get_quiz_stage, Que, session_reset, CheckChoice
class QuestionnaireListHandler(BaseHandler):
@gen.coroutine
def get(self):
        # Questionnaire list
cursor = yield self.db.execute("SELECT id, name FROM questionnaire;")
q_list = cursor.fetchall()
self.render('index.html', q_list=q_list)
class QuestionHandler(SessionBaseHandler):
@gen.coroutine
def _check_q_exist_n_get_q_a(self, q_id):
"""
        :param q_id: questionnaire id
        :raise gen.Return: returns q_a, where q is the questionnaire and a is the answer
"""
session_key = self.session_key
cursor = yield self.db.execute(
"""
SELECT answer.id as aid, answer.score_answer, answer.old_answer,
answer.order_answer, answer.try_count,
answer.has_finished, questionnaire.id, questionnaire.type, questionnaire.second,
questionnaire.flow, questionnaire.level_one_count from answer
INNER JOIN questionnaire ON answer.questionnaire_id = questionnaire.id
WHERE answer.questionnaire_id=%s
AND answer.session_key=%s;
""", (q_id, session_key)
)
        # q_a stands for "questionnaire and answer"
q_a = cursor.fetchone()
if not q_a:
cursor = yield self.db.execute("SELECT id, type, flow, level_one_count, second "
"FROM questionnaire WHERE id=%s;",
(q_id,))
q = cursor.fetchone()
if q:
cursor = yield self.db.execute("INSERT INTO answer (questionnaire_id, session_key, "
"score_answer, order_answer, old_answer) VALUES (%s, %s, %s, %s, %s)"
"RETURNING id AS aid, score_answer, "
"order_answer, old_answer, try_count, "
"has_finished;",
(q_id, session_key, Json({}), Json({}), Json({})))
ans = cursor.fetchone()
raise gen.Return((q, ans))
else:
raise HTTPError(404)
else:
raise gen.Return((q_a, q_a))
@gen.coroutine
def get(self, q_id):
session = self.session
q_a = yield self._check_q_exist_n_get_q_a(q_id)
q, ans = q_a
        # Session keys used below
is_re_start = 'is_%s_re_start' % q_id
step = '%s_step' % q_id
stage = '%s_stage' % q_id
next_item = '%s_next_item' % q_id
step_count = '%s_step_count' % q_id
        # The examinee's answering flow
flow = Flow(flow=q.flow, name=session.session_key)
        # If the session has no is_<q_id>_re_start flag, the examinee probably closed the browser, so restart the quiz
if not session.get(is_re_start, True):
            # Check whether stage one of the quiz has reached its end
if session[stage] == 1:
next_item_list = session[next_item]
que = Que(*next_item_list.pop(0))
else:
next_item = session[next_item]
que = Que(*next_item)
            # Set the restart flag to True, so closing the browser or refreshing the page restarts the quiz
session[is_re_start] = True
session[step] += 1
session[stage] = get_quiz_stage(session[step], session[stage], flow)
else:
            # Starting or restarting the quiz: reset the session to its defaults
session_reset(session, q_id)
            # Increment the number of attempts
if ans.try_count > (MAX_ANSWER_COUNT - 1):
raise HTTPError(403)
            # Move the previous answers into old_answer
if ans.score_answer:
ans.old_answer.update(ans.score_answer)
ans.score_answer.clear()
ans.order_answer.clear()
            # Number of items to answer in stage one
count = flow.get_level_item_count(1)
            # First item presented to the user
que = yield get_level_one_item(ans, session, q, count, self.db)
yield self.db.execute(
"UPDATE answer SET has_finished = false, try_count = try_count + 1, score_answer=%s, order_answer=%s, "
"old_answer=%s WHERE id=%s",
(Json(ans.score_answer), Json(ans.order_answer), Json(ans.old_answer), ans.aid)
)
            # Total number of items to answer
session[step_count] = flow.total_item_count
yield self.db.execute("UPDATE question SET count = count + 1 WHERE id=%s", (que.id, ))
total_step_count = session[step_count]
current_step = session[step]
current_progress = int((current_step * 1.0 / total_step_count) * 100)
second = q.second
session['q_%s_id' % q_id] = que
yield self.save()
self.render('cat.html', que=que, current_progress=current_progress,
total_step_count=total_step_count, current_step=current_step,
q_id=q_id, second=second)
@gen.coroutine
def post(self, q_id):
session = self.session
q_a = yield self._check_q_exist_n_get_q_a(q_id)
q, ans = q_a
q_type = q.type
que = Que(*session.get('q_%s_id' % q_id))
que_choice = self.get_argument('question')
check_choice = CheckChoice(que_choice, que)
if check_choice.is_valid():
            # Save the answer
value = check_choice.value
session['%s_score' % q_id].append(int(value))
ans.score_answer[str(que.id)]['score'] = value
ans.score_answer[str(que.id)]['choice'] = que_choice
            # Build the redirect URL
SelectQuestionClass = getattr(SelectQuestion, q_type)
url = yield SelectQuestionClass(session=session, q=q, que_id=que.id,
ans=ans, db=self.db).get_que_then_redirect()
yield self.save()
self.redirect(url)
else:
            # If the data is invalid, re-render the answer page
current_step = session['%s_step' % q_id]
total_step_count = session['%s_step_count' % q_id]
current_progress = int((current_step * 1.0 / total_step_count) * 100)
second = q.second
self.render('cat.html', que=que, current_progress=current_progress,
total_step_count=total_step_count, current_step=current_step,
q_id=q_id, second=second)
class ResultHandler(BaseHandler):
@gen.coroutine
def _check_result_exist_n_get_q_a(self, q_id):
session_key = self.get_cookie('sessionid')
if not session_key:
raise HTTPError(404)
cursor = yield self.db.execute(
"""
SELECT answer.score_answer, answer.order_answer, answer.has_finished from answer
INNER JOIN questionnaire ON answer.questionnaire_id = questionnaire.id
WHERE answer.questionnaire_id=%s
AND answer.session_key=%s;
""", (q_id, session_key)
)
        # q_a stands for "questionnaire and answer"
q_a = cursor.fetchone()
if (not q_a) or (not q_a.has_finished):
raise HTTPError(404)
else:
raise gen.Return(q_a)
@gen.coroutine
def get(self, q_id):
q_a = yield self._check_result_exist_n_get_q_a(q_id)
self.render('result.html', q_a=q_a, q_id=q_id)
if __name__ == "__main__":
parse_command_line()
ioloop = IOLoop.instance()
application = Application([
(r"/", QuestionnaireListHandler),
(r"/cat/(\d+)", QuestionHandler),
(r"/result/(\d+)", ResultHandler)
],
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
cookie_secret=COOKIE_SECRET,
debug=True,
xsrf_cookies=True,
)
application.db = momoko.Pool(
dsn=DSN,
size=1,
ioloop=ioloop,
cursor_factory=NamedTupleCursor,
)
future = application.db.connect()
ioloop.add_future(future, lambda f: ioloop.stop())
ioloop.start()
future.result()
http_server = HTTPServer(application)
http_server.listen(8000, 'localhost')
ioloop.start()
| inuyasha2012/tornado-cat-example | example/main.py | Python | mit | 8,865 | 0.002237 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-07 14:42
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('quiz', '0011_auto_20170407_1440'),
]
operations = [
migrations.AlterUniqueTogether(
name='selectedanswer',
unique_together=set([('quiz', 'user'), ('question', 'answer')]),
),
]
| denys-zarubin/sweetheart_test | quiz/migrations/0012_auto_20170407_1442.py | Python | unlicense | 547 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FIDATA. Open-source system for analysis of financial and economic data
# Copyright © 2013 Basil Peace
# This file is part of FIDATA.
#
# FIDATA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FIDATA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FIDATA. If not, see <http://www.gnu.org/licenses/>.
from FIDATA import *
initArgParser('Importer of predefined data', defLogFilename = 'import.log')
initFIDATA()
from csv import DictReader
from os import path
from PIL import Image
classes = []
logging.info('Import of predefined data started')
# logging.info('Importing langs')
# reader = DictReader(open('langs.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
# Lang(FIDATA, row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Lang]
logging.info('Importing scripts')
reader = DictReader(open('scripts.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
Script(FIDATA, row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [Script]
logging.info('Importing countries')
reader = DictReader(open('countries.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
# parent_country
# associated_with
if row['alpha2_code'] == '':
row['alpha2_code'] = None
else:
flagFilename = 'flags\{:s}.png'.format(row['alpha2_code'].lower())
if path.exists(flagFilename):
row['flag'] = Image.open(flagFilename)
if row['gov_website'] == '':
row['gov_website'] = None
if row['stats_website'] == '':
row['stats_website'] = None
FIDATA.country(row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [Country]
# logging.info('Importing issuers')
# reader = DictReader(open('issuers.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
# FIDATA.issuer(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Issuer]
# logging.info('Importing currencies')
# reader = DictReader(open('currencies.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
# row['instr_type'] = InstrumentType.Currency
# FIDATA.instrument(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# logging.info('Importing instruments')
# reader = DictReader(open('instruments.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
# FIDATA.instrument(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Instrument]
logging.info('Importing markets')
reader = DictReader(open('markets.csv', 'r', encoding = 'UTF8'), delimiter = ';')
child_markets = list()
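# Markets that reference a trade organizer are collected here and linked in a
# second pass below, once every market row has been created.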
for row in reader:
if row['country_alpha2_code'] == '':
row['country'] = None
else:
row['country'] = FIDATA.country(row = {
'alpha2_code': row['country_alpha2_code'],
'name' : row['country_name']
})
if row['acronym'] == '':
row['acronym'] = None
if row['website'] == '':
row['website'] = None
if row['trade_organizer_symbol'] == '':
FIDATA.market(row = row, write = True, tryGetFromDB = False)
else:
child_markets.append((FIDATA.market(row = row, write = False, tryGetFromDB = False), row['trade_organizer_symbol']))
del reader
for (market, trade_organizer_symbol) in child_markets:
market.tradeOrganizer = FIDATA.market(row = {'symbol': trade_organizer_symbol})
market.write()
del child_markets
commit()
classes += [Market]
logging.info('Importing data providers')
reader = DictReader(open('data_providers.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
if row['trade_organizer_symbol'] == '':
row['trade_organizer'] = None
else:
row['trade_organizer'] = FIDATA.market(row = {'symbol': row['trade_organizer_symbol']})
FIDATA.dataProvider(row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [DataProvider]
logging.info('Import of predefined data finished')
FIDATA.analyze(classes)
| FIDATA/database-draft | predefined-data/import.py | Python | gpl-3.0 | 4,368 | 0.027033 |
#!/usr/bin/python
from sys import exit
import sys
from site_imgsrc import imgsrc
from site_imgur import imgur
from site_deviantart import deviantart
from site_photobucket import photobucket
from site_flickr import flickr
from site_twitter import twitter
from site_tumblr import tumblr
from site_instagram import instagram
from site_imagefap import imagefap
from site_imagebam import imagebam
from site_imagearn import imagearn
from site_xhamster import xhamster
from site_getgonewild import getgonewild
from site_anonib import anonib
from site_motherless import motherless
from site_4chan import fourchan
from site_occ import occ
from site_minus import minus
from site_gifyo import gifyo
from site_imgsrc import imgsrc
from site_five00px import five00px
from site_chickupload import chickupload
from site_cghub import cghub
from site_teenplanet import teenplanet
from site_chansluts import chansluts
from site_gonearch import gonearch
from site_chanarchive import chanarchive
from site_seenive import seenive
try:
#i = imgur('http://imgur.com/a/8vmpo/noscript')
#i = imgur('http://scopolamina.imgur.com/')
#i = imgur('http://fuckmyusername.imgur.com')
#i = imgur('http://imgur.com/a/brixs')
#i = imgur('http://imgur.com/a/nvE9y')
#i = imgur('http://spicymustard.imgur.com/') # empty user acct
#i = imagefap('http://www.imagefap.com/pictures/2885204/Kentucky-Craigslist')
#i = imagefap('http://www.imagefap.com/pictures/3958759/Busty-Selfshooter')
#i = imagefap('http://www.imagefap.com/pictures/3960306/teen-fun/')
#i = imagebam('http://www.imagebam.com/gallery/3e4u10fk034871hs6idcil6txauu3ru6/')
#i = imagebam('http://www.imagebam.com/image/1ca1ab109274357')
#i = imagebam('http://www.imagebam.com/gallery/g23rwux1oz1g6n9gzjqw2k4e6yblqxdu')
#i = deviantart('http://angelsfalldown1.deviantart.com/gallery/2498849')
#i = deviantart('http://angelsfalldown1.deviantart.com/gallery/2498856')
#i = deviantart('http://dreamersintheskies.deviantart.com/gallery/') # Gets more than gmi-total
#i = deviantart('http://dreambaloon.deviantart.com/gallery/')
#i = deviantart('http://easy-shutter.deviantart.com/gallery/42198389')
#i = deviantart('http://garv23.deviantart.com')
#i = deviantart('http://wrouinr.deviantart.com/')
#i = photobucket('http://s579.beta.photobucket.com/user/merkler/library/')
#i = photobucket('http://s1131.beta.photobucket.com/user/Beth_fan/library/')
#i = photobucket('http://s1069.beta.photobucket.com/user/mandymgray/library/Album%203')
#i = photobucket('http://s1216.beta.photobucket.com/user/Liebe_Dich/profile/')
#i = flickr('http://www.flickr.com/photos/beboastar/sets/72157630130722172/')
#i = flickr('https://secure.flickr.com/photos/peopleofplatt/sets/72157624572361792/with/6344610705/')
#i = flickr('http://www.flickr.com/photos/rphotoit/sets/72157631879138251/with/8525941976/')
#i = flickr('http://www.flickr.com/photos/29809540@N04/')
#i = twitter('https://twitter.com/darrow_ashley')
#i = twitter('https://twitter.com/lemandicandi')
#i = twitter('https://twitter.com/MrNMissesSmith')
#i = twitter('https://twitter.com/PBAprilLewis') # GONE
#i = twitter('https://twitter.com/EversSecrets') # GONE
#i = tumblr('http://caramiaphotography.tumblr.com/tagged/me')
#i = tumblr('http://1fakeyfake.tumblr.com')
#i = tumblr('http://mourning-sex.tumblr.com/tagged/me')
#i = tumblr('http://i-was-masturbating-when-i.tumblr.com/')
#i = instagram('http://web.stagram.com/n/glitterypubez/')
#i = imagearn('http://imagearn.com/gallery.php?id=128805')
#i = imagearn('http://imagearn.com/gallery.php?id=29839')
#i = imagearn('http://imagearn.com/image.php?id=5046077')
#i = xhamster('http://xhamster.com/photos/gallery/1306566/lovely_teen_naked_for_self_shots.html')
#i = xhamster('http://xhamster.com/photos/gallery/1443114/cute_teens.html')
#i = xhamster('http://xhamster.com/photos/gallery/1742221/amateur_black_girls_volume_4-2.html')
#i = getgonewild('http://getgonewild.com/profile/EW2d')
#i = getgonewild('http://getgonewild.com/s/miss_ginger_biscuit')
#i = getgonewild('http://getgonewild.com/profile/yaymuffinss')
#i = anonib('http://www.anonib.com/t/res/1780.html')
#i = anonib('http://www.anonib.com/t/res/5019.html')
#i = anonib('http://www.anonib.com/tblr/res/12475.html')
#i = anonib('http://www.anonib.com/t/res/1780+50.html')
#i = anonib('http://www.anonib.com/tblr/res/12475+50.html')
#i = motherless('http://motherless.com/GI39ADA2C')
#i = motherless('http://motherless.com/GABDCF08')
#i = motherless('http://motherless.com/G7DC1B74')
#i = motherless('http://motherless.com/GV9719092')
#i = fourchan('http://boards.4chan.org/s/res/14035564')
#i = occ('http://forum.oneclickchicks.com/showthread.php?t=137808')
#i = occ('http://forum.oneclickchicks.com/showthread.php?t=102994')
#i = occ('http://forum.oneclickchicks.com/album.php?albumid=12579')
#i = occ('http://forum.oneclickchicks.com/showthread.php?t=146037')
#i = minus('http://minus.com')
#i = minus('http://.minus.com')
#i = minus('http://i.minus.com')
#i = minus('http://www.minus.com')
#i = minus('http://zuzahgaming.minus.com/mF31aoo7kNdiM')
#i = minus('https://nappingdoneright.minus.com/mu6fuBNNdfPG0')
#i = minus('http://nappingdoneright.minus.com/mu6fuBNNdfPG0')
#i = minus('https://nappingdoneright.minus.com/')
#i = minus('https://nappingdoneright.minus.com')
#i = minus('https://nappingdoneright.minus.com/uploads')
#i = gifyo('http://gifyo.com/ccrystallinee/')
#i = gifyo('http://gifyo.com/deanandhepburn/') # private
#i = imgsrc('http://imgsrc.ru/main/pic.php?ad=774665')
#i = imgsrc('http://imgsrc.ru/jp101091/26666184.html?pwd=&lang=en#')
#i = imgsrc('http://imgsrc.ru/hugo004/21447611.html')
#i = imgsrc('http://imgsrc.ru/fotoivanov/a661729.html')
#i = imagefap('http://www.imagefap.com/pictures/1561127/young-porn-girlie-masterbating')
#i = imagefap('http://www.imagefap.com/pictures/3883233/Maya-Black-Hot-Ts-2013')
#i = xhamster('http://xhamster.com/photos/gallery/635024/kira_the_beautiful_busty_redhead_xxx.html')
#i = five00px('http://500px.com/xxxsweetxxx')
#i = imgur('http://imgur.com/r/realgirls/new/day/')
#i = imgur('http://imgur.com/r/amateurarchives/top/all/')
#i = chickupload('http://chickupload.com/gallery/106023/Z64FYY7Q')
#i = deviantart('http://depingo.deviantart.com/gallery/')
#i = teenplanet('http://photos.teenplanet.org/atomicfrog/Dromeus/Skinny_Babe_vs_Bfs_Cock')
#i = cghub('http://wacomonkey.cghub.com/images/', urls_only=True)
#i = fourchan('http://boards.4chan.org/s/res/14177077', urls_only=True)
#i = anonib('http://www.anonib.com/azn/res/74347.html', urls_only=True)
#i = chickupload('http://chickupload.com/gallery/30621/OMTDRPYU', urls_only=True)
#i = deviantart('http://kindi-k.deviantart.com/gallery/', urls_only=True)
#i = five00px('http://500px.com/xxxsweetxxx', urls_only=True)
#i = getgonewild('http://getgonewild.com/profile/twoholes101', urls_only=True)
#i = gifyo('http://gifyo.com/ccrystallinee/', urls_only=True)
#i = imagearn('http://imagearn.com/gallery.php?id=226220', urls_only=True)
#i = imagebam('http://www.imagebam.com/gallery/3e4u10fk034871hs6idcil6txauu3ru6/', urls_only=True)
#i = imagefap('http://www.imagefap.com/pictures/2885204/Kentucky-Craigslist', urls_only=True)
#i = imgsrc('http://imgsrc.ru/fotoivanov/a661729.html', urls_only=True)
#i = imgur('http://imgur.com/a/brixs', urls_only=True)
#i = instagram('http://web.stagram.com/n/glitterypubez/', urls_only=True)
#i = minus('http://zuzahgaming.minus.com/mF31aoo7kNdiM', urls_only=True)
#i = motherless('http://motherless.com/G7DC1B74', urls_only=True)
#i = tumblr('http://caramiaphotography.tumblr.com/tagged/me', urls_only=True)
#i = twitter('https://twitter.com/darrow_ashley', urls_only=True)
#i = xhamster('http://xhamster.com/photos/gallery/1443114/cute_teens.html', urls_only=True)
#i = occ('http://forum.oneclickchicks.com/showthread.php?t=137808', urls_only=True)
#i = chansluts('http://www.chansluts.com/camwhores/girls/res/9447.php')
#i = flickr('http://www.flickr.com/photos/sabrina-dacos/') # NSFW, "bad panda"
#i = flickr('http://www.flickr.com/photos/alifewortheating/sets/72157632351550870/')
#i = flickr('http://www.flickr.com/photos/vichollo/')
#i = c('', urls_only=True)
#http://vk.com/album-34908971_163639688
#i = gonearch('http://gonearchiving.com/indexpics.php?author=personally-yours')
#i = gonearch('http://gonearchiving.com/indexlist.php?author=nutmegster')
#i = chanarchive('http://chanarchive.org/4chan/a/70052/can-a-make-a-good-soundtrack-for-netorare')
#http://chanarchive.org/4chan/s/10261/madalina-pica
#i = photobucket('http://s1275.photobucket.com/user/ew4ever/library/EW', urls_only=True, debugging=True)
#i = imgsrc('http://imgsrc.ru/martin1989/a1022028.html?', debugging=True)
#i = imgsrc('http://imgsrc.ru/martin1989/a1003277.html?', debugging=True)
#i = imgsrc('http://imgsrc.ru/martin1989/a1021809.html?', debugging=True)
#i = imgsrc('http://imgsrc.ru/main/pic_tape.php?ad=678965&pwd=bbe4233b74a1ca3ca6ba0c0c84bfe12e', debugging=True)
for gall in sys.argv:
if (".py" in gall) or (gall=="sites/test.py"):
continue
else: print "START: gall: %s \n" % gall
i = minus(gall, debugging=True)
#i = imagefap(gall, debugging=True)
#i = seenive(gall, debugging=True)
print "Working_dir: %s url: %s" % (i.working_dir, i.url)
if i.existing_zip_path() != None:
print 'Zip exists: %s' % i.existing_zip_path()
else:
print 'downloading...'
if ('pic_tape' in gall):
pwd = i.original_url[i.original_url.find('&')+5:]
print "PWD: %s" % pwd
i.download(pwd=pwd)
else:
i.download()
'''
print 'checking for zip:',
print str(i.existing_zip_path())
print "zip = %s" % i.zip()
print 'checking for zip:',
print str(i.existing_zip_path())
if i.existing_zip_path().endswith('.txt'):
f = open(i.existing_zip_path(), 'r')
print f.read()
f.close()
'''
#except KeyboardInterrupt:
# print '\ninterrupted'
except Exception, e:
print "\nEXCEPTION: %s" % str(e)
| larsbegas/rip-master | sites/_minux_tst.py | Python | gpl-2.0 | 10,576 | 0.023166 |
"""
Implementation of a stack data structure in Python (the top of the stack is index 0).
"""
class Stack:
def __init__(self,*vargs):
self.stack = list(vargs)
def __repr__(self):
return str(self.stack)
def top(self):
return self.stack[0]
def push(self,elem):
self.stack.insert(0,elem)
def pop(self):
return self.stack.pop(0)
if __name__ == '__main__':
stk = Stack(1,2,3,4)
print stk
print stk.top()
stk.push(10)
print stk
print stk.pop()
print stk
| beqa2323/learntosolveit | languages/python/design_stack.py | Python | bsd-3-clause | 507 | 0.013807 |
"""
Author: Armon Dadgar
Description:
This test checks that the dylink pre-processor methods are working properly by "including"
the sockettimeout library. We then check that the functions work.
This test uses the old "include" directive
"""
# Import the sockettimeout library
include sockettimeout
def new_conn(ip,port,sock,ch1,ch2):
  # Wait 2 seconds, then send data
sleep(2)
sock.send("Ping! Ping!")
sock.close()
if callfunc == "initialize":
# Check that we have the basic openconn,waitforconn and stopcomm
# This will throw an Attribute error if these are not set
check = timeout_openconn
check = timeout_waitforconn
check = timeout_stopcomm
# Get our ip
ip = getmyip()
port = 12345
# Setup a waitforconn
waith = timeout_waitforconn(ip,port,new_conn)
# Try to do a timeout openconn
sock = timeout_openconn(ip,port,timeout=2)
# Set the timeout to 1 seconds, and try to read
sock.settimeout(1)
try:
data = sock.recv(16)
# We should timeout
print "Bad! Got data: ",data
except:
pass
# Close the socket, and shutdown
sock.close()
timeout_stopcomm(waith)
| sburnett/seattle | seattlelib/tests/test_dylink_include.py | Python | mit | 1,136 | 0.033451 |
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loss function for IMPALA (Espeholt et al., 2018) [1].
[1] https://arxiv.org/abs/1802.01561
"""
from typing import Callable
from acme.agents.jax.impala import types
from acme.jax import utils
import haiku as hk
import jax.numpy as jnp
import numpy as np
import reverb
import rlax
import tree
def impala_loss(
unroll_fn: types.PolicyValueFn,
*,
discount: float,
max_abs_reward: float = np.inf,
baseline_cost: float = 1.,
entropy_cost: float = 0.,
) -> Callable[[hk.Params, reverb.ReplaySample], jnp.DeviceArray]:
"""Builds the standard entropy-regularised IMPALA loss function.
Args:
unroll_fn: A `hk.Transformed` object containing a callable which maps
(params, observations_sequence, initial_state) -> ((logits, value), state)
discount: The standard geometric discount rate to apply.
max_abs_reward: Optional symmetric reward clipping to apply.
baseline_cost: Weighting of the critic loss relative to the policy loss.
entropy_cost: Weighting of the entropy regulariser relative to policy loss.
Returns:
A loss function with signature (params, data) -> loss_scalar.
"""
def loss_fn(params: hk.Params,
sample: reverb.ReplaySample) -> jnp.DeviceArray:
"""Batched, entropy-regularised actor-critic loss with V-trace."""
# Extract the data.
data = sample.data
observations, actions, rewards, discounts, extra = (data.observation,
data.action,
data.reward,
data.discount,
data.extras)
initial_state = tree.map_structure(lambda s: s[0], extra['core_state'])
behaviour_logits = extra['logits']
# Apply reward clipping.
rewards = jnp.clip(rewards, -max_abs_reward, max_abs_reward)
# Unroll current policy over observations.
(logits, values), _ = unroll_fn(params, observations, initial_state)
# Compute importance sampling weights: current policy / behavior policy.
rhos = rlax.categorical_importance_sampling_ratios(logits[:-1],
behaviour_logits[:-1],
actions[:-1])
# Critic loss.
vtrace_returns = rlax.vtrace_td_error_and_advantage(
v_tm1=values[:-1],
v_t=values[1:],
r_t=rewards[:-1],
discount_t=discounts[:-1] * discount,
rho_tm1=rhos)
critic_loss = jnp.square(vtrace_returns.errors)
# Policy gradient loss.
policy_gradient_loss = rlax.policy_gradient_loss(
logits_t=logits[:-1],
a_t=actions[:-1],
adv_t=vtrace_returns.pg_advantage,
w_t=jnp.ones_like(rewards[:-1]))
# Entropy regulariser.
entropy_loss = rlax.entropy_loss(logits[:-1], jnp.ones_like(rewards[:-1]))
# Combine weighted sum of actor & critic losses, averaged over the sequence.
mean_loss = jnp.mean(policy_gradient_loss + baseline_cost * critic_loss +
entropy_cost * entropy_loss) # []
return mean_loss
return utils.mapreduce(loss_fn, in_axes=(None, 0))
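# Illustrative usage sketch (the names `network_unroll_fn`, `params` and `sample`
# are assumptions, not defined in this module):
#
#     loss_fn = impala_loss(network_unroll_fn, discount=0.99, entropy_cost=0.01)
#     loss = loss_fn(params, sample)  # sample: reverb.ReplaySample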
| deepmind/acme | acme/jax/losses/impala.py | Python | apache-2.0 | 3,849 | 0.001299 |
import rhinoscriptsyntax as rs
#Import the random module
import random as rn
#Set the seed and create lists for the points, lines and triangles
rn.seed(s)
pts = [pt0]
lines = []
triangles = []
newptList = [pt0]
#Iterate to create each point, line and triangle
for i in range(It):
    #Points in polar coordinates with controlled angle and step ranges
angulos = rn.randint(Amin,AMAX)
steps = rn.randint(Rmin,RMAX)
NewPts = rs.Polar((pts[-1]),angulos,steps)
pts.append(NewPts)
    #Once the points are created, build the triangles:
    #first the line between consecutive points
a = pts[i]
b = pts[i+1]
line = rs.AddLine(a,b)
lines.append(line)
    #Get the vector normal to the line and scale it by a random factor
    rnleng = rn.randint(3,5)/10.0 #float division: integer division would always give 0 here
z = rs.CurveNormal(line)
vector = rs.VectorCreate(a,b)
nor = rs.VectorCrossProduct(vector,z)
normal = rs.VectorScale(nor,rnleng)
trans1 = rs.XformTranslation(normal)
trans2 = rs.XformTranslation(rs.VectorReverse(normal))
    #Offset the base point to both sides along the scaled normal vector
newpts1 = rs.PointTransform(a,trans1)
newpts2 = rs.PointTransform(a,trans2)
tri = rs.AddPolyline([b,newpts1,newpts2,b])
triangles.append(tri)
ptList = pts
| Fablab-Sevilla/ghPython101 | Día_002/01_EJ/T_002/Peter random walk final.py | Python | mit | 1,275 | 0.026709 |
# -*- encoding: utf-8 -*-
"""
Usage::
hammer organization [OPTIONS] SUBCOMMAND [ARG] ...
Parameters::
SUBCOMMAND subcommand
[ARG] ... subcommand arguments
Subcommands::
add-computeresource Associate a resource
add-configtemplate Associate a resource
add-domain Associate a resource
add-environment Associate a resource
add-hostgroup Associate a resource
add-location Associate a location
add-medium Associate a resource
add-smartproxy Associate a resource
add-subnet Associate a resource
add-user Associate a resource
create Create an organization
delete Delete an organization
delete-parameter Delete parameter for an organization.
info Show an organization
list List all organizations
remove_computeresource Disassociate a resource
remove_configtemplate Disassociate a resource
remove_domain Disassociate a resource
remove_environment Disassociate a resource
remove_hostgroup Disassociate a resource
remove-location Disassociate a location
remove_medium Disassociate a resource
remove_smartproxy Disassociate a resource
remove_subnet Disassociate a resource
remove_user Disassociate a resource
set-parameter Create or update parameter for an
organization.
update Update an organization
"""
from robottelo.cli.base import Base
class Org(Base):
"""Manipulates Foreman's Organizations"""
command_base = 'organization'
@classmethod
def add_compute_resource(cls, options=None):
"""Adds a computeresource to an org"""
cls.command_sub = 'add-compute-resource'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_compute_resource(cls, options=None):
"""Removes a computeresource from an org"""
cls.command_sub = 'remove-compute-resource'
return cls.execute(cls._construct_command(options))
@classmethod
def add_config_template(cls, options=None):
"""Adds a configtemplate to an org"""
cls.command_sub = 'add-config-template'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_config_template(cls, options=None):
"""Removes a configtemplate from an org"""
cls.command_sub = 'remove-config-template'
return cls.execute(cls._construct_command(options))
@classmethod
def add_domain(cls, options=None):
"""Adds a domain to an org"""
cls.command_sub = 'add-domain'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_domain(cls, options=None):
"""Removes a domain from an org"""
cls.command_sub = 'remove-domain'
return cls.execute(cls._construct_command(options))
@classmethod
def add_environment(cls, options=None):
"""Adds an environment to an org"""
cls.command_sub = 'add-environment'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_environment(cls, options=None):
"""Removes an environment from an org"""
cls.command_sub = 'remove-environment'
return cls.execute(cls._construct_command(options))
@classmethod
def add_hostgroup(cls, options=None):
"""Adds a hostgroup to an org"""
cls.command_sub = 'add-hostgroup'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_hostgroup(cls, options=None):
"""Removes a hostgroup from an org"""
cls.command_sub = 'remove-hostgroup'
return cls.execute(cls._construct_command(options))
@classmethod
def add_location(cls, options=None):
"""Adds a location to an org"""
cls.command_sub = 'add-location'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_location(cls, options=None):
"""Removes a location from an org"""
cls.command_sub = 'remove-location'
return cls.execute(cls._construct_command(options))
@classmethod
def add_medium(cls, options=None):
"""Adds a medium to an org"""
cls.command_sub = 'add-medium'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_medium(cls, options=None):
"""Removes a medium from an org"""
cls.command_sub = 'remove-medium'
return cls.execute(cls._construct_command(options))
@classmethod
def add_smart_proxy(cls, options=None):
"""Adds a smartproxy to an org"""
cls.command_sub = 'add-smart-proxy'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_smart_proxy(cls, options=None):
"""Removes a smartproxy from an org"""
cls.command_sub = 'remove-smart-proxy'
return cls.execute(cls._construct_command(options))
@classmethod
def add_subnet(cls, options=None):
"""Adds existing subnet to an org"""
cls.command_sub = 'add-subnet'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_subnet(cls, options=None):
"""Removes a subnet from an org"""
cls.command_sub = 'remove-subnet'
return cls.execute(cls._construct_command(options))
@classmethod
def add_user(cls, options=None):
"""Adds an user to an org"""
cls.command_sub = 'add-user'
return cls.execute(cls._construct_command(options))
@classmethod
def remove_user(cls, options=None):
"""Removes an user from an org"""
cls.command_sub = 'remove-user'
return cls.execute(cls._construct_command(options))
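# Illustrative usage sketch (the option keys below follow hammer CLI naming and
# are assumptions, not verified against this code base):
#
#     Org.create({'name': 'Example Org'})
#     Org.add_user({'name': 'Example Org', 'user': 'jdoe'})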
| ares/robottelo | robottelo/cli/org.py | Python | gpl-3.0 | 6,156 | 0 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Generator of histology report
"""
import logging
logger = logging.getLogger(__name__)
# import functions from another directory
import sys
import os.path
path_to_script = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(path_to_script, "../../lar-cc/lib/py/"))
sys.path.append(os.path.join(path_to_script, "../lisa/extern"))
sys.path.append(os.path.join(path_to_script, "../../pyplasm/src/pyplasm"))
import numpy as np
from scipy import mat, cos, sin
from larcc import VIEW, MKPOL, AA, INTERVALS, STRUCT, MAP, PROD
from larcc import UNITVECT, VECTPROD, PI, SUM, CAT, IDNT, UNITVECT
from splines import BEZIER, S1, S2, COONSPATCH
# from splines import *
# import mapper
#import hpc
#import pyplasm.hpc
import geometry3d as g3
import interpolation_pyplasm as ip
import skelet3d
import skelet3d.gt_lar_smooth
from skelet3d.gt_lar_smooth import GTLarSmooth
logger.warning("Module is moved to package skelet3d.gt_vtk. This placeholder will be removed in future")
# class GTLarSmooth:
#
# def __init__(self, gtree=None):
# """
# gtree is information about input data structure.
# endDistMultiplicator: make cylinder shorter by multiplication of radius
# """
# # input of geometry and topology
# self.V = []
# self.CV = []
# self.joints = {}
# self.joints_lar = []
# self.gtree = gtree
# self.endDistMultiplicator = 2
# self.use_joints = True
# #dir(splines)
# pass
#
# def add_cylinder(self, nodeA, nodeB, radius, cylinder_id):
#
# try:
# idA = tuple(nodeA) # self.gtree.tree_data[cylinder_id]['nodeIdA']
# idB = tuple(nodeB) # self.gtree.tree_data[cylinder_id]['nodeIdB']
# except:
# idA = 0
# idB = 0
# self.use_joints = False
#
# vect = np.array(nodeA) - np.array(nodeB)
# u = g3.perpendicular_vector(vect)
# u = u / np.linalg.norm(u)
# u = u.tolist()
# vect = vect.tolist()
#
#
#
# c1 = self.__circle(nodeA, radius, vect)
# c2 = self.__circle(nodeB, radius, vect)
# tube = BEZIER(S2)([c1,c2])
# domain = PROD([ INTERVALS(2*PI)(36), INTERVALS(1)(4) ])
# tube = MAP(tube)(domain)
#
#
# self.joints_lar.append(tube)
#
#
# #self.__draw_circle(nodeB, vect, radius)
#
# ##vector = (np.array(nodeA) - np.array(nodeB)).tolist()
#
# # mov circles to center of cylinder by size of radius because of joint
# ##nodeA = g3.translate(nodeA, vector,
# ## -radius * self.endDistMultiplicator)
# ##nodeB = g3.translate(nodeB, vector,
# ## radius * self.endDistMultiplicator)
#
# ##ptsA, ptsB = g3.cylinder_circles(nodeA, nodeB, radius, element_number=32)
# ##CVlistA = self.__construct_cylinder_end(ptsA, idA, nodeA)
# ##CVlistB = self.__construct_cylinder_end(ptsB, idB, nodeB)
#
# ##CVlist = CVlistA + CVlistB
#
# ##self.CV.append(CVlist)
#
# # lar add ball
# # ball0 = mapper.larBall(radius, angle1=PI, angle2=2*PI)([10, 16])
# # V, CV = ball0
# # # mapper.T
# # # ball = STRUCT(MKPOLS(ball0))
# #
# # # mapper.T(1)(nodeA[0])(mapper.T(2)(nodeA[1])(mapper.T(3)(nodeA[1])(ball)))
# #
# # lenV = len(self.V)
# #
# # self.V = self.V + (np.array(V) + np.array(nodeA)).tolist()
# # self.CV = self.CV + (np.array(CV) + lenV).tolist()
#
# def __circle(self, center=[0,0,0],radius=1,normal=[0,0,1],sign=1,shift=0):
# N = UNITVECT(normal)
# if N == [0,0,1] or N == [0,0,-1]: Q = mat(IDNT(3))
# else:
# QX = UNITVECT((VECTPROD([[0,0,1],N])))
# QZ = N
# QY = VECTPROD([QZ,QX])
# Q = mat([QX,QY,QZ]).T
# def circle0(p):
# u = p[0]
# x = radius*cos(sign*u+shift)
# y = radius*sin(sign*u+shift)
# z = 0
# return SUM([ center, CAT((Q*[[x],[y],[z]]).tolist()) ])
# return circle0
#
#
# def __construct_cylinder_end(self, pts, id, node):
# """
# creates end of cylinder and prepares for joints
# """
# CVlist = []
# # base
# ln = len(self.V)
#
# for i, pt in enumerate(pts):
# self.V.append(pt)
# CVlist.append(ln + i)
#
# try:
# self.joints[id].append([node, CVlist])
# except:
# self.joints[id] = [[node, CVlist]]
#
# return CVlist
#
# def __add_old_cylinder(self, nodeA, nodeB, radius):
# """
# deprecated simple representation of cylinder
# """
# nodeA = np.array(nodeA)
# nodeB = np.array(nodeB)
#
# ln = len(self.V)
# self.V.append(nodeB.tolist())
# self.V.append((nodeB + [2, 0, 0]).tolist())
# self.V.append((nodeB + [2, 2, 0]).tolist())
# self.V.append((nodeB + [2, 2, 2]).tolist())
# self.V.append((nodeA + [0, 0, 0]).tolist())
# self.CV.append([ln, ln + 1, ln + 2, ln + 3, ln + 4])
#
# def finish(self):
# print 'use joints? ', self.use_joints
# if self.use_joints:
# for joint in self.joints.values():
# # There is more then just one circle in this joint, so it
# # is not end of vessel
# if len(joint) > 1:
# self.__generate_joint(joint)
#
#
# def __half_plane(self, perp, plane_point, point):
# cdf = (np.array(point) - np.array(plane_point))
# out = perp[0] * cdf[0] +\
# perp[1] * cdf[1] + \
# perp[2] * cdf[2]
# return out > 0
#
# def __get_vessel_connection_curve(self, vessel_connection, perp, vec0, vec1):
# """
# perp is perpendicular to plane given by centers of circles
# vec1, vec0 are vectors from circle centers
# """
# curve_t = []
# curve_d = []
# curve_pts_indexes_t = []
# curve_pts_indexes_d = []
# brake_point_t = None
# brake_point_d = None
# center, circle = vessel_connection
#
# # left to right
# perp_lr = np.cross(perp, vec1)
#
# print 'center ', center
# print 'circle ', circle
# for vertex_id in circle:
# if ((len(curve_pts_indexes_t) > 0) and
# (vertex_id - curve_pts_indexes_t[-1]) > 1):
# brake_point_t = len(curve_pts_indexes_t)
# if ((len(curve_pts_indexes_d) > 0) and
# (vertex_id - curve_pts_indexes_d[-1]) > 1):
# brake_point_d = len(curve_pts_indexes_d)
#
# #hp = self.__half_plane(perp_lr, center, self.V[vertex_id])
# hp = self.__half_plane(perp, center, self.V[vertex_id])
#
#
# if(hp):
# curve_t.append(self.V[vertex_id])
# curve_pts_indexes_t.append(vertex_id)
# else:
# curve_d.append(self.V[vertex_id])
# curve_pts_indexes_d.append(vertex_id)
#
# ordered_curve_t = curve_t[brake_point_t:] + curve_t[:brake_point_t]
# ordered_pts_indexes_t = \
# curve_pts_indexes_t[brake_point_t:] +\
# curve_pts_indexes_t[:brake_point_t]
#
# ordered_curve_d = curve_d[brake_point_d:] + curve_d[:brake_point_d]
# ordered_pts_indexes_d = \
# curve_pts_indexes_d[brake_point_t:] +\
# curve_pts_indexes_d[:brake_point_d]
# #print ' hp v id ', curve_pts_indexes_t
# #print 'ord hp v id ', ordered_pts_indexes_t
#
# #print 'hp circle ', curve_one
#
# # add point from oposit half-circle
# first_pt_d = ordered_curve_d[0]
# last_pt_d = ordered_curve_d[-1]
# first_pt_t = ordered_curve_t[0]
# last_pt_t = ordered_curve_t[-1]
#
# ordered_curve_t.append(first_pt_d)
# ordered_curve_t.insert(0, last_pt_d)
#
# ordered_curve_d.append(first_pt_t)
# ordered_curve_d.insert(0, last_pt_t)
#
# return ordered_curve_t, ordered_curve_d
#
# def __generate_joint(self, joint):
# #joint = (np.array(joint).reshape(-1)).tolist()
# #self.CV.append(joint)
# cc0 = np.array(joint[0][0])
# cc1 = np.array(joint[1][0])
# cc2 = np.array(joint[2][0])
#
# vec0 = cc0 - cc1
# vec1 = cc1 - cc2
#
# perp = np.cross(vec0, vec1)
#
#
# curvelistT = []
# curvelistD = []
#
# for vessel_connection in joint:
# ordered_curve_t, ordered_curve_d = self.__get_vessel_connection_curve(
# vessel_connection, perp, vec0, vec1)
#
#
#
# curvelistT.append(ordered_curve_t)
# curvelistD.append(ordered_curve_d)
# #print ' ', self.V[vertex_id], ' hp: ', hp
#
# Betacurve_id, Astart, Alphacurve_id, Bstart, Gammacurve_id, Cstart = self.__find_couples(curvelistT)
#
# #print 'ABC ', Betacurve_id, Astart, Alphacurve_id, Bstart
#
# dom2D = ip.TRIANGLE_DOMAIN(32, [[1,0,0],[0,1,0],[0,0,1]])
# Cab0 = BEZIER(S1)(self.__order_curve(curvelistT[Gammacurve_id][-1:0:-1], Cstart))
# Cbc0 = BEZIER(S1)(self.__order_curve(curvelistT[Alphacurve_id], Bstart))
# Cbc1 = BEZIER(S2)(self.__order_curve(curvelistT[Alphacurve_id], Bstart))
# Cca0 = BEZIER(S1)(self.__order_curve(curvelistT[Betacurve_id][-1:0:-1], Astart))
#
# out1 = MAP(ip.TRIANGULAR_COONS_PATCH([Cab0,Cbc1,Cca0]))(STRUCT(dom2D))
# self.joints_lar.append(out1)
#
# Betacurve_id, Astart, Alphacurve_id, Bstart, Gammacurve_id, Cstart = self.__find_couples(curvelistD)
#
# #print 'ABC ', Betacurve_id, Astart, Alphacurve_id, Bstart
#
# dom2D = ip.TRIANGLE_DOMAIN(32, [[1,0,0],[0,1,0],[0,0,1]])
# Cab0 = BEZIER(S1)(self.__order_curve(curvelistD[Gammacurve_id][-1:0:-1], Cstart))
# Cbc0 = BEZIER(S1)(self.__order_curve(curvelistD[Alphacurve_id], Bstart))
# Cbc1 = BEZIER(S2)(self.__order_curve(curvelistD[Alphacurve_id], Bstart))
# Cca0 = BEZIER(S1)(self.__order_curve(curvelistD[Betacurve_id][-1:0:-1], Astart))
#
# out2 = MAP(ip.TRIANGULAR_COONS_PATCH([Cab0,Cbc1,Cca0]))(STRUCT(dom2D))
# self.joints_lar.append(out2)
#
# def __find_couples(self, curvelist):
# """
# try find all posible couples with minimal energy.
# Energy is defined like sum of distances
# """
# energy = None
# mn_ind = None
# output = None
# for i in range(0,3):
# Betacurve_id, Astart, dist0 = self.__find_nearest(
# curvelist, i, 0, [i])
# Alphacurve_id, Bstart, dist1 = self.__find_nearest(
# curvelist, i, -1, [i, Betacurve_id])
# this_energy = dist0 + dist1
#
# if energy is None or this_energy < energy:
# energy = this_energy
# mn_ind = i
# #Gammacurve_id = i
# output = Betacurve_id, Astart, Alphacurve_id, Bstart, i, 0
#
#
# Betacurve_id, Astart, dist0 = self.__find_nearest(
# curvelist, i, -1, [i])
# Alphacurve_id, Bstart, dist1 = self.__find_nearest(
# curvelist, i, 0, [i, Betacurve_id])
# this_energy = dist0 + dist1
#
# if energy is None or this_energy < energy:
# energy = this_energy
# mn_ind = i
# output = Betacurve_id, Astart, Alphacurve_id, Bstart, i, -1
#
# print 'output'
# print output
#
# return output
#
#
# def __order_curve(self, curve, start):
# if start is 0:
# return curve
# else:
# return curve[-1:0:-1]
#
# def __find_nearest(self, curvelist, this_curve_index, start, wrong_curve=None):
# """
# start: use 0 or -1
# """
# #if start:
# # start_index = 0
# #else:
# # start_index = -1
# if wrong_curve is None:
# wrong_curve = [this_curve_index]
# dist = None
# min_cv_ind = None
# min_cv_start = None
#
# for curve_index in range(0, len(curvelist)):
# if curve_index not in wrong_curve:
# pt0 = np.array(curvelist[this_curve_index][start])
# pt1 = np.array(curvelist[curve_index][0])
# this_dist = np.linalg.norm(pt0 - pt1)
# if (dist is None) or (this_dist < dist):
# dist = this_dist
# min_cv_ind = curve_index
# min_cv_start = 0
#
# pt1 = np.array(curvelist[curve_index][-1])
# this_dist = np.linalg.norm(pt0 - pt1)
# if (dist is None) or (this_dist < dist):
# dist = this_dist
# min_cv_ind = curve_index
# min_cv_start = -1
#
# return min_cv_ind, min_cv_start, dist
#
#
#
#
#
# def show(self):
#
# V = self.V
# CV = self.CV
#
# # V = [[0,0,0],[5,5,1],[0,5,5],[5,5,5]]
# # CV = [[0,1,2,3]]
# #for joint in self.joints_lar:
#
# # out = STRUCT([MKPOL([V, AA(AA(lambda k:k + 1))(CV), []])] + self.joints_lar)
# out = STRUCT(self.joints_lar)
# #VIEW(self.joints_lar[0])
# #VIEW(MKPOL([V, AA(AA(lambda k:k + 1))(CV), []]))
# VIEW(out)
# def get_output(self):
# pass
#
# def __add_tetr(self, nodeB):
# """
# Creates tetrahedron in specified position.
# """
# try:
# nodeB = nodeB.tolist()
# except:
# pass
#
# ln = len(self.V)
# self.V.append(nodeB)
# self.V.append((np.array(nodeB) + [2, 0, 0]).tolist())
# self.V.append((np.array(nodeB) + [2, 2, 0]).tolist())
# self.V.append((np.array(nodeB) + [2, 2, 2]).tolist())
# self.CV.append([ln, ln + 1, ln + 2, ln + 3])
#
# def __add_cone(self, nodeA, nodeB, radius):
# vect = (np.array(nodeA) - np.array(nodeB)).tolist()
# pts = self.__circle(nodeA, vect, radius)
#
# ln = len(self.V)
# self.V.append(nodeB)
# # first object is top of cone
# CVlist = [ln]
#
# for i, pt in enumerate(pts):
# self.V.append(pt)
# CVlist.append(ln + i + 1)
#
# self.CV.append(CVlist)
#
# def __add_circle(self, center, perp_vect, radius, polygon_element_number=10):
# """
# Draw circle some circle points as tetrahedrons.
# """
# pts = g3.circle(center, perp_vect, radius,
# polygon_element_number=polygon_element_number)
# for pt in pts:
# self.__add_tetr(pt)
| mjirik/imtools | imtools/gt_lar_smooth.py | Python | mit | 14,966 | 0.00254 |
# -*- coding: utf-8 -*-
# Simple Bot (SimpBot)
# Copyright 2016-2017, Ismael Lugo (kwargs)
import re
import sys
import ssl
import time
import logging
import socket
from six import binary_type
from six import string_types
from six import PY3 as python3
from six.moves import _thread
from six.moves import queue
from . import buffer
from . import features
from . import __version__
from .bottools import text
from . import localedata
from . import envvars
from .schedule import basic_scheduler as scheduler
i18n = localedata.get()
Logger = logging.getLogger('simpbot')
regexmsg = re.compile(
':(?P<mask>(?P<nick>.+)!(?P<user>.+)@(?P<host>[^ ]+)) '
'(?P<type>PRIVMSG|NOTICE) (?P<target>[^ ]+) :(?P<message>.+)', 2)
sche_name = lambda nw, nm: '{network}-{name}'.format(network=nw, name=nm)
lg_format = []
class client:
dispatcher_added = False
dispatcher_dict = {}
def __init__(self, netw, addr, port, nick, user, nickserv=None, sasl=None,
timeout=240, msgps=.5, wtime=30, servpass=None, prefix='!', lang=None,
plaintext={
'recv': ['msg', 'jpqk', 'mode'],
'send': ['msg', 'jpqk', 'mode']}):
# msg PRIVMSG, NOTICE
# jpqk JOIN, PART, QUIT, KICK
# mode CHANNEL AND USER MODES
# plane plain text
self.logger = logging.getLogger(netw)
if envvars.daemon is True:
fs = '%(asctime)s %(levelname)s: %(message)s'
handler = logging.FileHandler(envvars.logs.join(netw).lower(), 'a')
else:
fs = '%(levelname)s: irc-client(%(name)s): %(message)s'
handler = logging.StreamHandler(sys.stdout)
if not netw in lg_format:
handler.setFormatter(logging.Formatter(fs, None))
self.logger.addHandler(handler)
self.logger.propagate = 0
lg_format.append(netw)
self.connection_status = 'n'
self.input_alive = False
self.output_alive = False
self.lock = False
self.socket = None
self.input_buffer = None
self.output_buffer = queue.Queue()
self.features = features.FeatureSet()
self.plaintext = plaintext
self.default_lang = lang
self.dbstore = None
self.request = None
self.commands = None
self.autoconnect = False
self.conf_path = None
self.max_chars = 256
# IRC - Default
self.servname = netw
self.addr = addr
self.ssl = False
if isinstance(port, string_types):
if port.startswith('+'):
self.ssl = True
port = port.replace('+', '')
if port.isdigit():
port = int(port)
else:
port = 6667
elif isinstance(port, float) or isinstance(port, int):
port = int(port)
self.port = port
# IRC - Extra
self.servpass = servpass
self.nickserv = nickserv
self.usens = bool(self.nickserv)
self.sasl = sasl
self.timeout = timeout
self.msgps = msgps
self.wtime = wtime
self.prefix = prefix
std = sche_name(netw, 'std')
self.scheduler_std = scheduler(std, self, envvars.jobs.join(std))
self.scheduler_std.load()
self.scheduler_std.start()
ban = sche_name(netw, 'ban')
self.scheduler_ban = scheduler(ban, self, envvars.jobs.join(ban))
self.scheduler_ban.load()
self.scheduler_ban.start()
if nick == "" or nick[0].isdigit():
nick = text.randphras(l=7, upper=False, nofd=True)
if user == "" or user[0].isdigit():
user = text.randphras(l=7, upper=False, nofd=True)
self.nickname = nick
self.username = user
def __del__(self):
self.disconnect()
self.set_status('s')
if self.dbstore:
self.dbstore.save()
def set_status(self, modes):
"""
mode:
n: No connected
c: Connected
r: Connected and loged
p: Concection lost
d: Disconnected
"""
self.connection_status = modes[0]
def connect(self, servpass=None, attempts=0):
if not self.connection_status in 'np':
return
attempt = 0
while attempt <= attempts:
try:
self.socket = socket.socket()
self.socket.settimeout(self.timeout)
self.input_buffer = buffer.LineBuffer()
if self.ssl:
self.socket = ssl.wrap_socket(self.socket)
self.socket.connect((self.addr, self.port))
self.set_status('c')
break
except Exception as error:
self.logger.error(i18n['connection failure'],
self.addr, self.port, str(error))
self.logger.info(i18n['retrying connect'] % self.wtime)
time.sleep(self.wtime)
if attempts == 1:
attempt += 2
elif attempts > 0:
attempt += 1
else:
return True
remote_addr = self.socket.getpeername()[0]
self.logger.info(i18n['connected'], self.addr, remote_addr, self.port)
if servpass is not None:
self.servpass = servpass
if self.servpass is not None:
self.passwd(self.servpass)
if self.nickserv is None:
return
elif self.sasl:
# Simple Authentication and Security Layer (SASL) - RFC 4422
# Copyright (C) The Internet Society (2006).
pw = '{0}\0{0}\0{1}'.format(self.nickserv[0], self.nickserv[1])
self.send_raw('AUTHENTICATE PLAIN')
self.send_raw('AUTHENTICATE ' + pw.encode('base64'))
def check_plaintext(self, pos, opt):
if pos in self.plaintext:
if 'all' in self.plaintext[pos]:
return False
return opt in self.plaintext[pos]
else:
return False
@property
def connected(self):
return self.connection_status == 'c' or self.connection_status == 'r'
def reconnect(self, msg=""):
self.disconnect(msg)
if self.connection_status == 'd':
self.set_status('n')
self.try_connect()
def disconnect(self, msg=""):
if self.connection_status in 'cr':
self.quit(msg)
time.sleep(2.5)
self.set_status('d')
if self.socket:
try:
self.socket.close()
except:
pass
if self.request:
self.request.reset()
self.output_buffer.put(0)
def login(self):
self.user(self.username, self.nickname)
self.nick(self.nickname)
def output(self):
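        # Writer thread: drains the output queue, rate-limits messages to the
        # server, and exits when the 0 sentinel (queued by disconnect()) is read.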
while self.connection_status in 'crp':
self.output_alive = True
try:
text = self.output_buffer.get(timeout=self.timeout)
            except queue.Empty:
                if self.connection_status in 'cr':
                    self.set_status('p')
                    self.try_connect()
                # `text` was never assigned on this path, so start a new iteration
                continue
if text == 0:
break
if isinstance(text, string_types):
if python3:
message = text.encode() + b'\r\n'
else:
message = text + '\r\n'
else:
self.logger.warning(i18n['invalid message'])
continue
if len(text) > 512:
                self.logger.warning(i18n['invalid message size'])
continue
try:
self.socket.send(message)
except socket.error:
if self.connection_status in 'cr':
self.set_status('p')
self.try_connect()
if 'send' in self.plaintext and 'all' in self.plaintext['send']:
self.logger.info(i18n['output'], text)
time.sleep(self.msgps)
else:
            self.output_alive = False
def input(self):
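        # Reader thread: receives raw bytes, feeds the line buffer, runs the
        # registered handlers and queues PRIVMSG/NOTICE lines for the command processor.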
while self.connection_status in 'crp':
self.input_alive = True
try:
recv = self.socket.recv(1024)
except socket.timeout:
self.logger.error(i18n['connection timeout'],
self.servname, self.timeout)
if self.connection_status in 'cr':
self.set_status('p')
self.try_connect()
continue
except socket.error:
if self.connection_status in 'cr':
self.set_status('p')
self.try_connect()
continue
else:
if recv == '':
if self.connection_status in 'cr':
self.set_status('p')
self.try_connect()
continue
self.input_buffer.feed(recv)
for line in self.input_buffer:
if python3 and isinstance(line, binary_type):
line = str(line, 'utf-8')
if not line:
continue
if 'recv' in self.plaintext and 'all' in self.plaintext['recv']:
self.logger.info(i18n['input'], line)
msg = regexmsg.match(line)
self.proccess_handlers(line)
if msg and self.commands:
self.commands.put(msg)
continue
else:
self.input_alive = False
def try_connect(self, attempts=0):
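        # Serialize reconnection attempts: the first caller takes the lock and
        # reconnects; concurrent callers just wait for the lock to clear and return.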
if not self.lock:
self.lock = True
else:
while self.lock:
time.sleep(1)
else:
return
if self.socket:
try:
self.socket.close()
except:
pass
if self.connect(attempts=attempts):
self.lock = False
return False
if not self.input_alive:
_thread.start_new(self.input, (), {})
if not self.output_alive:
_thread.start_new(self.output, (), {})
else:
while not self.output_buffer.empty():
self.output_buffer.get()
self.login()
self.lock = False
def proccess_handlers(self, text):
for handler in self.handlers:
SRE_Match = handler['match'](text)
if SRE_Match is not None:
try:
exec_res = handler['func'](self, SRE_Match.group)
except Exception as err:
Logger.error('Handler(%s) Exception: %s' % (
handler['func'].__name__, repr(err)))
else:
if exec_res is None:
return
else:
continue
def send_raw(self, text):
if self.connection_status in 'npds':
return
self.output_buffer.put(text)
##########################################################################
# Comandos IRC #
##########################################################################
@text.normalize
def ctcp(self, ctcptype, target, parameter=""):
tmpl = (
"\001{ctcptype} {parameter}\001" if parameter else
"\001{ctcptype}\001"
)
self.privmsg(target, tmpl.format(**vars()))
@text.normalize
def ctcp_reply(self, target, parameter):
self.notice(target, "\001%s\001" % parameter)
@text.normalize
def join(self, channel, key=""):
self.send_raw("JOIN %s%s" % (channel, (key and (" " + key))))
@text.normalize
def kick(self, channel, nick, comment=""):
tmpl = "KICK {channel} {nick}"
if comment:
tmpl += " :{comment}"
self.send_raw(tmpl.format(**vars()))
@text.normalize
def invite(self, nick, channel):
self.send_raw(" ".join(["INVITE", nick, channel]).strip())
@text.normalize
def nick(self, newnick):
self.send_raw("NICK " + newnick)
@text.normalize
def notice(self, target, msg):
form = '%s -> <%s> %s'
for line in msg.splitlines():
if len(line) <= self.max_chars:
if self.check_plaintext('send', 'msg'):
self.logger.info(form % (self.nickname, target, line))
self.send_raw("NOTICE %s :%s" % (target, line))
else:
for subline in text.part(line, self.max_chars):
if self.check_plaintext('send', 'msg'):
self.logger.info(form % (self.nickname, target, subline))
self.send_raw("NOTICE %s :%s" % (target, subline))
@text.normalize
def part(self, channels, message=""):
self.send_raw("PART %s%s" % (channels, (message and (" " + message))))
@text.normalize
def privmsg(self, target, msg):
form = '%s -> <%s> %s'
for line in msg.splitlines():
if len(line) <= self.max_chars:
if self.check_plaintext('send', 'msg'):
self.logger.info(form % (self.nickname, target, line))
self.send_raw("PRIVMSG %s :%s" % (target, line))
else:
for subline in text.part(line, self.max_chars):
if self.check_plaintext('send', 'msg'):
self.logger.info(form % (self.nickname, target, subline))
self.send_raw("PRIVMSG %s :%s" % (target, subline))
@text.normalize
def msg(self, target, text):
self.notice(target, text)
@text.normalize
def mode(self, target, command):
self.send_raw("MODE %s %s" % (target, command))
def verbose(self, capab, text):
if not self.dbstore or not self.connection_status in 'r':
return
capab = 'verbose:' + capab
ison = []
for user in self.dbstore.admins_list():
if user.admin.has_capab(capab):
ison.extend(user.admin.ison)
for target in ison:
self.notice(target, text)
@text.normalize
def error(self, target, msg):
for line in text.part(msg, 256, '... '):
self.send_raw("NOTICE %s :[ERROR]: %s" % (target, line))
@text.normalize
def passwd(self, password):
self.send_raw("PASS " + password)
def pong(self, target, target2=""):
self.send_raw("PONG %s%s" % (target, target2 and (" " + target2)))
@text.normalize
def remove(self, channel, nick, comment=""):
tmpl = "REMOVE {channel} {nick}"
if comment:
tmpl += " :{comment}"
self.send_raw(tmpl.format(**vars()))
def who(self, target):
if self.request:
self.request.who(target)
else:
self._who(target)
@text.normalize
def _who(self, target="", op=""):
self.send_raw("WHO%s%s" % (target and (" " + target), op and (" o")))
def whois(self, target):
if self.request:
self.request.whois(target)
else:
self._whois(target)
@text.normalize
def _whois(self, targets):
self.send_raw("WHOIS " + ",".join(targets.replace(',', '').split()))
@text.normalize
def topic(self, channel, new_topic=None):
if new_topic is None:
self.send_raw("TOPIC " + channel)
else:
self.send_raw("TOPIC %s :%s" % (channel, new_topic))
@text.normalize
def user(self, username, realname):
self.send_raw("USER %s 0 * :%s" % (username, realname))
@text.normalize
def quit(self, message=""):
if message == "":
message = 'SimpBot v' + __version__
self.send_raw("QUIT" + (message and (" :" + message)))
| IsmaelRLG/simpbot | simpbot/irc.py | Python | mit | 16,036 | 0.000873 |
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.network_common import to_list
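# Per-process cache of device output, keyed by the exact command string, so
# repeated get_config() calls do not re-run 'show running-config' on the device.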
_DEVICE_CONFIGS = {}
def get_config(module, flags=None):
    # Avoid a mutable default argument; treat None as "no extra flags".
    flags = flags or []
    cmd = 'show running-config '
cmd += ' '.join(flags)
cmd = cmd.strip()
try:
return _DEVICE_CONFIGS[cmd]
except KeyError:
rc, out, err = module.exec_command(cmd)
if rc != 0:
module.fail_json(msg='unable to retrieve current config', stderr=err)
cfg = str(out).strip()
_DEVICE_CONFIGS[cmd] = cfg
return cfg
def run_commands(module, commands, check_rc=True):
responses = list()
for cmd in to_list(commands):
rc, out, err = module.exec_command(cmd)
if check_rc and rc != 0:
module.fail_json(msg=err, rc=rc)
responses.append(out)
return responses
def load_config(module, commands):
assert isinstance(commands, list), 'commands must be a list'
rc, out, err = module.exec_command('configure terminal')
if rc != 0:
module.fail_json(msg='unable to enter configuration mode', err=err)
for command in commands:
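        # 'end' is issued once after this loop (below); skip it here so we do not
        # leave configuration mode before the remaining commands are applied.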
if command == 'end':
continue
rc, out, err = module.exec_command(command)
if rc != 0:
module.fail_json(msg=err, command=command, rc=rc)
module.exec_command('end')
| jcftang/ansible | lib/ansible/module_utils/ios.py | Python | gpl-3.0 | 2,926 | 0.005126 |
import numpy as np
from imreg_dph import AffineTransform
from nose.tools import *
def test_translation():
"""make sure tranlation works"""
# AffineTransform Tests
af1 = AffineTransform(translation=(1, 2))
af2 = AffineTransform(translation=(5, 3))
af3 = af1 @ af2
assert np.array_equal(af3.translation, (6, 5))
assert af3 == af2 @ af1
def test_rotation():
"""Test that rotation works"""
af1 = AffineTransform(rotation=2)
af2 = AffineTransform(rotation=1)
af3 = af1 @ af2
assert af3.rotation == 3
assert af3 == af2 @ af1
| david-hoffman/scripts | test_imreg_dph.py | Python | apache-2.0 | 575 | 0 |
from typing import Dict, Any
import torch
class Scheduler:
"""
A `Scheduler` is a generalization of PyTorch learning rate schedulers.
A scheduler can be used to update any field in an optimizer's parameter groups,
not just the learning rate.
During training using the AllenNLP `Trainer`, this is the API and calling
sequence for `step` and `step_batch`::
scheduler = ... # creates scheduler
batch_num_total = 0
for epoch in range(num_epochs):
for batch in batchs_in_epoch:
# compute loss, update parameters with current learning rates
# call step_batch AFTER updating parameters
batch_num_total += 1
scheduler.step_batch(batch_num_total)
# call step() at the END of each epoch
scheduler.step(validation_metrics, epoch)
"""
def __init__(
self, optimizer: torch.optim.Optimizer, param_group_field: str, last_epoch: int = -1
) -> None:
self.optimizer = optimizer
self.param_group_field = param_group_field
self._initial_param_group_field = f"initial_{param_group_field}"
if last_epoch == -1:
for i, group in enumerate(self.optimizer.param_groups):
if param_group_field not in group:
raise KeyError(f"{param_group_field} missing from param_groups[{i}]")
group.setdefault(self._initial_param_group_field, group[param_group_field])
else:
for i, group in enumerate(self.optimizer.param_groups):
if self._initial_param_group_field not in group:
raise KeyError(
f"{self._initial_param_group_field} missing from param_groups[{i}]"
)
self.base_values = [
group[self._initial_param_group_field] for group in self.optimizer.param_groups
]
self.last_epoch = last_epoch
def state_dict(self) -> Dict[str, Any]:
"""
Returns the state of the scheduler as a `dict`.
"""
return {key: value for key, value in self.__dict__.items() if key != "optimizer"}
def load_state_dict(self, state_dict: Dict[str, Any]) -> None:
"""
Load the schedulers state.
# Parameters
state_dict : `Dict[str, Any]`
Scheduler state. Should be an object returned from a call to `state_dict`.
"""
self.__dict__.update(state_dict)
def get_values(self):
raise NotImplementedError
def step(self, metric: float = None) -> None:
self.last_epoch += 1
self.metric = metric
for param_group, value in zip(self.optimizer.param_groups, self.get_values()):
param_group[self.param_group_field] = value
def step_batch(self, batch_num_total: int = None) -> None:
"""
By default, a scheduler is assumed to only update every epoch, not every batch.
        So this does nothing unless it's overridden.
"""
return
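
# Illustrative sketch (not part of this module): `get_values()` is abstract above,
# so a hypothetical concrete scheduler that decays every parameter group's value
# by a fixed factor per epoch could look like this (class name and factor are
# made up for illustration only):
#
#   class ConstantFactorScheduler(Scheduler):
#       def __init__(self, optimizer, factor: float = 0.5, last_epoch: int = -1):
#           self.factor = factor
#           super().__init__(optimizer, "lr", last_epoch)
#
#       def get_values(self):
#           return [base * (self.factor ** max(self.last_epoch, 0))
#                   for base in self.base_values]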
| allenai/allennlp | allennlp/training/scheduler.py | Python | apache-2.0 | 3,040 | 0.003289 |
#!/usr/bin/env python
import os.path
import sys
DIRNAME = os.path.dirname(__file__)
if DIRNAME not in sys.path:
sys.path.append(DIRNAME)
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
| luxnovalabs/enjigo_door | web_interface/keyedcache/test_app/manage.py | Python | unlicense | 668 | 0.005988 |
from flask import Flask, render_template
from flask.ext.bootstrap import Bootstrap
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.uploads import UploadSet, configure_uploads, IMAGES
from config import config
bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
avatars = UploadSet('avatars', IMAGES)
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
    configure_uploads(app, avatars)
from .main import main as main_blueprint
from .auth import auth as auth_blueprint
from .admin import admin as admin_blueprint
app.register_blueprint(main_blueprint)
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(admin_blueprint, url_prefix='/admin')
return app
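
# Illustrative usage (hypothetical entry point, not part of this package):
#
#   app = create_app('default')  # assumes a 'default' key exists in the config dict
#   app.run()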
| chenke91/ihaveablog | app/__init__.py | Python | mit | 1,126 | 0.000888 |
import unittest
from datetime import datetime
import pymongo
from mongoengine import *
from mongoengine.base import BaseField
from mongoengine.connection import _get_db
class DocumentTest(unittest.TestCase):
def setUp(self):
connect(db='mongoenginetest')
self.db = _get_db()
class Person(Document):
name = StringField()
age = IntField()
self.Person = Person
def test_drop_collection(self):
"""Ensure that the collection may be dropped from the database.
"""
self.Person(name='Test').save()
collection = self.Person._meta['collection']
self.assertTrue(collection in self.db.collection_names())
self.Person.drop_collection()
self.assertFalse(collection in self.db.collection_names())
def test_definition(self):
"""Ensure that document may be defined using fields.
"""
name_field = StringField()
age_field = IntField()
class Person(Document):
name = name_field
age = age_field
non_field = True
self.assertEqual(Person._fields['name'], name_field)
self.assertEqual(Person._fields['age'], age_field)
self.assertFalse('non_field' in Person._fields)
self.assertTrue('id' in Person._fields)
# Test iteration over fields
fields = list(Person())
self.assertTrue('name' in fields and 'age' in fields)
# Ensure Document isn't treated like an actual document
self.assertFalse(hasattr(Document, '_fields'))
def test_get_superclasses(self):
"""Ensure that the correct list of superclasses is assembled.
"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
mammal_superclasses = {'Animal': Animal}
self.assertEqual(Mammal._superclasses, mammal_superclasses)
dog_superclasses = {
'Animal': Animal,
'Animal.Mammal': Mammal,
}
self.assertEqual(Dog._superclasses, dog_superclasses)
def test_get_subclasses(self):
"""Ensure that the correct list of subclasses is retrieved by the
_get_subclasses method.
"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
mammal_subclasses = {
'Animal.Mammal.Dog': Dog,
'Animal.Mammal.Human': Human
}
self.assertEqual(Mammal._get_subclasses(), mammal_subclasses)
animal_subclasses = {
'Animal.Fish': Fish,
'Animal.Mammal': Mammal,
'Animal.Mammal.Dog': Dog,
'Animal.Mammal.Human': Human
}
self.assertEqual(Animal._get_subclasses(), animal_subclasses)
def test_polymorphic_queries(self):
"""Ensure that the correct subclasses are returned from a query"""
class Animal(Document): pass
class Fish(Animal): pass
class Mammal(Animal): pass
class Human(Mammal): pass
class Dog(Mammal): pass
Animal().save()
Fish().save()
Mammal().save()
Human().save()
Dog().save()
classes = [obj.__class__ for obj in Animal.objects]
self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog])
classes = [obj.__class__ for obj in Mammal.objects]
self.assertEqual(classes, [Mammal, Human, Dog])
classes = [obj.__class__ for obj in Human.objects]
self.assertEqual(classes, [Human])
Animal.drop_collection()
def test_inheritance(self):
"""Ensure that document may inherit fields from a superclass document.
"""
class Employee(self.Person):
salary = IntField()
self.assertTrue('name' in Employee._fields)
self.assertTrue('salary' in Employee._fields)
self.assertEqual(Employee._meta['collection'],
self.Person._meta['collection'])
# Ensure that MRO error is not raised
class A(Document): pass
class B(A): pass
class C(B): pass
def test_allow_inheritance(self):
"""Ensure that inheritance may be disabled on simple classes and that
_cls and _types will not be used.
"""
class Animal(Document):
meta = {'allow_inheritance': False}
name = StringField()
Animal.drop_collection()
def create_dog_class():
class Dog(Animal):
pass
self.assertRaises(ValueError, create_dog_class)
# Check that _cls etc aren't present on simple documents
dog = Animal(name='dog')
dog.save()
collection = self.db[Animal._meta['collection']]
obj = collection.find_one()
self.assertFalse('_cls' in obj)
self.assertFalse('_types' in obj)
Animal.drop_collection()
def create_employee_class():
class Employee(self.Person):
meta = {'allow_inheritance': False}
self.assertRaises(ValueError, create_employee_class)
# Test the same for embedded documents
class Comment(EmbeddedDocument):
content = StringField()
meta = {'allow_inheritance': False}
def create_special_comment():
class SpecialComment(Comment):
pass
self.assertRaises(ValueError, create_special_comment)
comment = Comment(content='test')
self.assertFalse('_cls' in comment.to_mongo())
self.assertFalse('_types' in comment.to_mongo())
def test_collection_name(self):
"""Ensure that a collection with a specified name may be used.
"""
collection = 'personCollTest'
if collection in self.db.collection_names():
self.db.drop_collection(collection)
class Person(Document):
name = StringField()
meta = {'collection': collection}
user = Person(name="Test User")
user.save()
self.assertTrue(collection in self.db.collection_names())
user_obj = self.db[collection].find_one()
self.assertEqual(user_obj['name'], "Test User")
user_obj = Person.objects[0]
self.assertEqual(user_obj.name, "Test User")
Person.drop_collection()
self.assertFalse(collection in self.db.collection_names())
def test_capped_collection(self):
"""Ensure that capped collections work properly.
"""
class Log(Document):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 10,
'max_size': 90000,
}
Log.drop_collection()
# Ensure that the collection handles up to its maximum
for i in range(10):
Log().save()
self.assertEqual(len(Log.objects), 10)
# Check that extra documents don't increase the size
Log().save()
self.assertEqual(len(Log.objects), 10)
options = Log.objects._collection.options()
self.assertEqual(options['capped'], True)
self.assertEqual(options['max'], 10)
self.assertEqual(options['size'], 90000)
# Check that the document cannot be redefined with different options
def recreate_log_document():
class Log(Document):
date = DateTimeField(default=datetime.now)
meta = {
'max_documents': 11,
}
# Create the collection by accessing Document.objects
Log.objects
self.assertRaises(InvalidCollectionError, recreate_log_document)
Log.drop_collection()
def test_indexes(self):
"""Ensure that indexes are used when meta[indexes] is specified.
"""
class BlogPost(Document):
date = DateTimeField(db_field='addDate', default=datetime.now)
category = StringField()
tags = ListField(StringField())
meta = {
'indexes': [
'-date',
'tags',
('category', '-date')
],
}
BlogPost.drop_collection()
info = BlogPost.objects._collection.index_information()
# _id, types, '-date', 'tags', ('cat', 'date')
self.assertEqual(len(info), 5)
# Indexes are lazy so use list() to perform query
list(BlogPost.objects)
info = BlogPost.objects._collection.index_information()
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
in info.values())
self.assertTrue([('_types', 1), ('addDate', -1)] in info.values())
# tags is a list field so it shouldn't have _types in the index
self.assertTrue([('tags', 1)] in info.values())
class ExtendedBlogPost(BlogPost):
title = StringField()
meta = {'indexes': ['title']}
BlogPost.drop_collection()
list(ExtendedBlogPost.objects)
info = ExtendedBlogPost.objects._collection.index_information()
self.assertTrue([('_types', 1), ('category', 1), ('addDate', -1)]
in info.values())
self.assertTrue([('_types', 1), ('addDate', -1)] in info.values())
self.assertTrue([('_types', 1), ('title', 1)] in info.values())
BlogPost.drop_collection()
def test_unique(self):
"""Ensure that uniqueness constraints are applied to fields.
"""
class BlogPost(Document):
title = StringField()
slug = StringField(unique=True)
BlogPost.drop_collection()
post1 = BlogPost(title='test1', slug='test')
post1.save()
# Two posts with the same slug is not allowed
post2 = BlogPost(title='test2', slug='test')
self.assertRaises(OperationError, post2.save)
class Date(EmbeddedDocument):
year = IntField(db_field='yr')
class BlogPost(Document):
title = StringField()
date = EmbeddedDocumentField(Date)
slug = StringField(unique_with='date.year')
BlogPost.drop_collection()
post1 = BlogPost(title='test1', date=Date(year=2009), slug='test')
post1.save()
        # year is different so won't raise exception
post2 = BlogPost(title='test2', date=Date(year=2010), slug='test')
post2.save()
        # Now there will be two docs with the same slug and the same year: fail
post3 = BlogPost(title='test3', date=Date(year=2010), slug='test')
self.assertRaises(OperationError, post3.save)
BlogPost.drop_collection()
def test_custom_id_field(self):
"""Ensure that documents may be created with custom primary keys.
"""
class User(Document):
username = StringField(primary_key=True)
name = StringField()
User.drop_collection()
self.assertEqual(User._fields['username'].db_field, '_id')
self.assertEqual(User._meta['id_field'], 'username')
def create_invalid_user():
User(name='test').save() # no primary key field
self.assertRaises(ValidationError, create_invalid_user)
def define_invalid_user():
class EmailUser(User):
email = StringField(primary_key=True)
self.assertRaises(ValueError, define_invalid_user)
user = User(username='test', name='test user')
user.save()
user_obj = User.objects.first()
self.assertEqual(user_obj.id, 'test')
user_son = User.objects._collection.find_one()
self.assertEqual(user_son['_id'], 'test')
self.assertTrue('username' not in user_son['_id'])
User.drop_collection()
def test_creation(self):
"""Ensure that document may be created using keyword arguments.
"""
person = self.Person(name="Test User", age=30)
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 30)
def test_reload(self):
"""Ensure that attributes may be reloaded.
"""
person = self.Person(name="Test User", age=20)
person.save()
person_obj = self.Person.objects.first()
person_obj.name = "Mr Test User"
person_obj.age = 21
person_obj.save()
self.assertEqual(person.name, "Test User")
self.assertEqual(person.age, 20)
person.reload()
self.assertEqual(person.name, "Mr Test User")
self.assertEqual(person.age, 21)
def test_dictionary_access(self):
"""Ensure that dictionary-style field access works properly.
"""
person = self.Person(name='Test User', age=30)
self.assertEquals(person['name'], 'Test User')
self.assertRaises(KeyError, person.__getitem__, 'salary')
self.assertRaises(KeyError, person.__setitem__, 'salary', 50)
person['name'] = 'Another User'
self.assertEquals(person['name'], 'Another User')
# Length = length(assigned fields + id)
self.assertEquals(len(person), 3)
self.assertTrue('age' in person)
person.age = None
self.assertFalse('age' in person)
self.assertFalse('nationality' in person)
def test_embedded_document(self):
"""Ensure that embedded documents are set up correctly.
"""
class Comment(EmbeddedDocument):
content = StringField()
self.assertTrue('content' in Comment._fields)
self.assertFalse('id' in Comment._fields)
self.assertFalse('collection' in Comment._meta)
def test_embedded_document_validation(self):
"""Ensure that embedded documents may be validated.
"""
class Comment(EmbeddedDocument):
date = DateTimeField()
content = StringField(required=True)
comment = Comment()
self.assertRaises(ValidationError, comment.validate)
comment.content = 'test'
comment.validate()
comment.date = 4
self.assertRaises(ValidationError, comment.validate)
comment.date = datetime.now()
comment.validate()
def test_save(self):
"""Ensure that a document may be saved in the database.
"""
# Create person object and save it to the database
person = self.Person(name='Test User', age=30)
person.save()
# Ensure that the object is in the database
collection = self.db[self.Person._meta['collection']]
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(person_obj['name'], 'Test User')
self.assertEqual(person_obj['age'], 30)
self.assertEqual(person_obj['_id'], person.id)
def test_delete(self):
"""Ensure that document may be deleted using the delete method.
"""
person = self.Person(name="Test User", age=30)
person.save()
self.assertEqual(len(self.Person.objects), 1)
person.delete()
self.assertEqual(len(self.Person.objects), 0)
def test_save_custom_id(self):
"""Ensure that a document may be saved with a custom _id.
"""
# Create person object and save it to the database
person = self.Person(name='Test User', age=30,
id='497ce96f395f2f052a494fd4')
person.save()
# Ensure that the object is in the database with the correct _id
collection = self.db[self.Person._meta['collection']]
person_obj = collection.find_one({'name': 'Test User'})
self.assertEqual(str(person_obj['_id']), '497ce96f395f2f052a494fd4')
def test_save_list(self):
"""Ensure that a list field may be properly saved.
"""
class Comment(EmbeddedDocument):
content = StringField()
class BlogPost(Document):
content = StringField()
comments = ListField(EmbeddedDocumentField(Comment))
tags = ListField(StringField())
BlogPost.drop_collection()
post = BlogPost(content='Went for a walk today...')
post.tags = tags = ['fun', 'leisure']
comments = [Comment(content='Good for you'), Comment(content='Yay.')]
post.comments = comments
post.save()
collection = self.db[BlogPost._meta['collection']]
post_obj = collection.find_one()
self.assertEqual(post_obj['tags'], tags)
for comment_obj, comment in zip(post_obj['comments'], comments):
self.assertEqual(comment_obj['content'], comment['content'])
BlogPost.drop_collection()
def test_save_embedded_document(self):
"""Ensure that a document with an embedded document field may be
saved in the database.
"""
class EmployeeDetails(EmbeddedDocument):
position = StringField()
class Employee(self.Person):
salary = IntField()
details = EmbeddedDocumentField(EmployeeDetails)
# Create employee object and save it to the database
employee = Employee(name='Test Employee', age=50, salary=20000)
employee.details = EmployeeDetails(position='Developer')
employee.save()
# Ensure that the object is in the database
collection = self.db[self.Person._meta['collection']]
employee_obj = collection.find_one({'name': 'Test Employee'})
self.assertEqual(employee_obj['name'], 'Test Employee')
self.assertEqual(employee_obj['age'], 50)
# Ensure that the 'details' embedded object saved correctly
self.assertEqual(employee_obj['details']['position'], 'Developer')
def test_save_reference(self):
"""Ensure that a document reference field may be saved in the database.
"""
class BlogPost(Document):
meta = {'collection': 'blogpost_1'}
content = StringField()
author = ReferenceField(self.Person)
BlogPost.drop_collection()
author = self.Person(name='Test User')
author.save()
post = BlogPost(content='Watched some TV today... how exciting.')
# Should only reference author when saving
post.author = author
post.save()
post_obj = BlogPost.objects.first()
# Test laziness
self.assertTrue(isinstance(post_obj._data['author'],
pymongo.dbref.DBRef))
self.assertTrue(isinstance(post_obj.author, self.Person))
self.assertEqual(post_obj.author.name, 'Test User')
# Ensure that the dereferenced object may be changed and saved
post_obj.author.age = 25
post_obj.author.save()
author = list(self.Person.objects(name='Test User'))[-1]
self.assertEqual(author.age, 25)
BlogPost.drop_collection()
def tearDown(self):
self.Person.drop_collection()
if __name__ == '__main__':
unittest.main()
| alex/mongoengine | tests/document.py | Python | mit | 19,227 | 0.002236 |
#!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Prints the results of an Address record lookup, Mail-Exchanger record
lookup, and Nameserver record lookup for the given hostname for a
given hostname.
To run this script:
$ python testdns.py <hostname>
e.g.:
$ python testdns.py www.google.com
"""
import sys
from twisted.names import client
from twisted.internet import defer, reactor
from twisted.names import dns, error
r = client.Resolver('/etc/resolv.conf')
def formatResult(result, heading):
    answer, authority, additional = result
    lines = ['# ' + heading]
    for record in answer:
        line = [
            record.name,
            dns.QUERY_CLASSES.get(record.cls, 'UNKNOWN (%d)' % (record.cls,)),
            record.payload]
        lines.append(' '.join(str(word) for word in line))
    return '\n'.join(lines)
def printError(f):
f.trap(defer.FirstError)
f = f.value.subFailure
f.trap(error.DomainError)
print f.value.__class__.__name__, f.value.message.queries
def printResults(res):
for r in res:
print r
print
if __name__ == '__main__':
domainname = sys.argv[1]
d = defer.gatherResults([
r.lookupAddress(domainname).addCallback(
formatResult, 'Addresses'),
r.lookupMailExchange(domainname).addCallback(
formatResult, 'Mail Exchangers'),
r.lookupNameservers(domainname).addCallback(
formatResult, 'Nameservers'),
], consumeErrors=True)
d.addCallbacks(printResults, printError)
d.addBoth(lambda ign: reactor.stop())
reactor.run()
| kernel-sanders/arsenic-mobile | Dependencies/Twisted-13.0.0/doc/names/examples/testdns.py | Python | gpl-3.0 | 1,653 | 0 |
# Copyright (C) 2021 OpenMotics BV
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
apartment controller manages the apartment objects that are known in the system
"""
import logging
from gateway.events import EsafeEvent, EventError
from gateway.exceptions import ItemDoesNotExistException, StateException
from gateway.models import Apartment, Database
from gateway.mappers import ApartmentMapper
from gateway.dto import ApartmentDTO
from gateway.pubsub import PubSub
from ioc import INJECTED, Inject, Injectable, Singleton
if False: # MyPy
from typing import List, Optional, Dict, Any
from esafe.rebus import RebusController
logger = logging.getLogger(__name__)
@Injectable.named('apartment_controller')
@Singleton
class ApartmentController(object):
def __init__(self):
self.rebus_controller = None # type: Optional[RebusController]
def set_rebus_controller(self, rebus_controller):
self.rebus_controller = rebus_controller
@staticmethod
@Inject
def send_config_change_event(msg, error=EventError.ErrorTypes.NO_ERROR, pubsub=INJECTED):
# type: (str, Dict[str, Any], PubSub) -> None
event = EsafeEvent(EsafeEvent.Types.CONFIG_CHANGE, {'type': 'apartment', 'msg': msg}, error=error)
pubsub.publish_esafe_event(PubSub.EsafeTopics.CONFIG, event)
@staticmethod
def load_apartment(apartment_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.id == apartment_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_mailbox_id(mailbox_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.mailbox_rebus_id == mailbox_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartment_by_doorbell_id(doorbell_id):
# type: (int) -> Optional[ApartmentDTO]
apartment_orm = Apartment.select().where(Apartment.doorbell_rebus_id == doorbell_id).first()
if apartment_orm is None:
return None
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
return apartment_dto
@staticmethod
def load_apartments():
# type: () -> List[ApartmentDTO]
apartments = []
for apartment_orm in Apartment.select():
apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
apartments.append(apartment_dto)
return apartments
@staticmethod
def get_apartment_count():
# type: () -> int
return Apartment.select().count()
@staticmethod
def apartment_id_exists(apartment_id):
# type: (int) -> bool
apartments = ApartmentController.load_apartments()
ids = (x.id for x in apartments)
return apartment_id in ids
def _check_rebus_ids(self, apartment_dto):
if self.rebus_controller is None:
raise StateException("Cannot save apartment: Rebus Controller is None")
if 'doorbell_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.doorbell_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: doorbell ({}) does not exists".format(apartment_dto.doorbell_rebus_id))
if 'mailbox_rebus_id' in apartment_dto.loaded_fields and \
not self.rebus_controller.verify_device_exists(apartment_dto.mailbox_rebus_id):
raise ItemDoesNotExistException("Cannot save apartment: mailbox ({}) does not exists".format(apartment_dto.mailbox_rebus_id))
def save_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
apartment_orm = ApartmentMapper.dto_to_orm(apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('save')
return ApartmentMapper.orm_to_dto(apartment_orm)
def save_apartments(self, apartments_dto):
apartments_dtos = []
for apartment in apartments_dto:
apartment_saved = self.save_apartment(apartment, send_event=False)
apartments_dtos.append(apartment_saved)
self.send_config_change_event('save')
return apartments_dtos
def update_apartment(self, apartment_dto, send_event=True):
# type: (ApartmentDTO, bool) -> ApartmentDTO
self._check_rebus_ids(apartment_dto)
if 'id' not in apartment_dto.loaded_fields or apartment_dto.id is None:
raise RuntimeError('cannot update an apartment without the id being set')
try:
apartment_orm = Apartment.get_by_id(apartment_dto.id)
loaded_apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm)
for field in apartment_dto.loaded_fields:
if field == 'id':
continue
if hasattr(apartment_dto, field):
setattr(loaded_apartment_dto, field, getattr(apartment_dto, field))
apartment_orm = ApartmentMapper.dto_to_orm(loaded_apartment_dto)
apartment_orm.save()
if send_event:
ApartmentController.send_config_change_event('update')
return ApartmentMapper.orm_to_dto(apartment_orm)
except Exception as e:
            raise RuntimeError('Could not update the apartment: {}'.format(e))
def update_apartments(self, apartment_dtos):
# type: (List[ApartmentDTO]) -> Optional[List[ApartmentDTO]]
apartments = []
with Database.get_db().transaction() as transaction:
try:
# First clear all the rebus fields in order to be able to swap 2 fields
for apartment in apartment_dtos:
apartment_orm = Apartment.get_by_id(apartment.id) # type: Apartment
if 'mailbox_rebus_id' in apartment.loaded_fields:
apartment_orm.mailbox_rebus_id = None
if 'doorbell_rebus_id' in apartment.loaded_fields:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
# Then check if there is already an apartment with an mailbox or doorbell rebus id that is passed
                # This is needed for when a doorbell or mailbox gets assigned to another apartment. Then the first assignment needs to be deleted.
for apartment_orm in Apartment.select():
for apartment_dto in apartment_dtos:
if apartment_orm.mailbox_rebus_id == apartment_dto.mailbox_rebus_id and apartment_orm.mailbox_rebus_id is not None:
apartment_orm.mailbox_rebus_id = None
apartment_orm.save()
if apartment_orm.doorbell_rebus_id == apartment_dto.doorbell_rebus_id and apartment_orm.doorbell_rebus_id is not None:
apartment_orm.doorbell_rebus_id = None
apartment_orm.save()
for apartment in apartment_dtos:
updated = self.update_apartment(apartment, send_event=False)
if updated is not None:
apartments.append(updated)
self.send_config_change_event('update')
except Exception as ex:
logger.error('Could not update apartments: {}: {}'.format(type(ex).__name__, ex))
transaction.rollback()
return None
return apartments
@staticmethod
def delete_apartment(apartment_dto):
# type: (ApartmentDTO) -> None
if "id" in apartment_dto.loaded_fields and apartment_dto.id is not None:
Apartment.delete_by_id(apartment_dto.id)
elif "name" in apartment_dto.loaded_fields:
# First check if there is only one:
if Apartment.select().where(Apartment.name == apartment_dto.name).count() <= 1:
Apartment.delete().where(Apartment.name == apartment_dto.name).execute()
ApartmentController.send_config_change_event('delete')
else:
raise RuntimeError('More than one apartment with the given name: {}'.format(apartment_dto.name))
else:
raise RuntimeError('Could not find an apartment with the name {} to delete'.format(apartment_dto.name))
| openmotics/gateway | src/gateway/apartment_controller.py | Python | agpl-3.0 | 9,287 | 0.0028 |
{% block meta %}
name: Base
description: SMACH base template.
language: Python
framework: SMACH
type: Base
tags: [core]
includes: []
extends: []
variables:
- - manifest:
description: ROS manifest name.
type: str
- - node_name:
description: ROS node name for the state machine.
type: str
- outcomes:
description: A list of possible outcomes of the state machine.
type: list
- - userdata:
description: Definitions for userdata needed by child states.
type: dict
- - function_name:
description: A name for the main executable state machine function.
type: str
input_keys: []
output_keys: []
{% endblock meta %}
{% from "Utils.tpl.py" import import_module, render_outcomes, render_userdata %}
{% set defined_headers = [] %}
{% set local_vars = [] %}
{% block base_header %}
#!/usr/bin/env python
{{ base_header }}
{% endblock base_header %}
{% block imports %}
{{ import_module(defined_headers, 'smach') }}
{{ imports }}
{% endblock imports %}
{% block defs %}
{{ defs }}
{% endblock defs %}
{% block class_defs %}
{{ class_defs }}
{% endblock class_defs %}
{% block cb_defs %}
{{ cb_defs }}
{% endblock cb_defs %}
{% if name is defined %}{% set sm_name = name | lower() %}{% else %}{% set sm_name = 'sm' %}{% endif %}
{% block main_def %}
def {% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}():
{{ main_def | indent(4) }}
{% endblock main_def %}
{% block body %}
{{ sm_name }} = smach.StateMachine({{ render_outcomes(outcomes) }})
{# Container header insertion variable indexed by container state name #}
{% if name in header %}{{ header[name] | indent(4, true) }}{% endif %}
{# Render container userdata #}
{% if userdata is defined %}{{ render_userdata(name | lower(), userdata) | indent(4) }}{% endif %}
{# Render state userdata #}
{% if name in header_userdata %}{{ header_userdata[name] | indent(4, true) }}{% endif %}
with {{ sm_name }}:
{# Container body insertion variable #}
{{ body | indent(8) }}
{% endblock body %}
{% block footer %}
{{ footer | indent(8) }}
{% endblock footer %}
{% block execute %}
{{ execute | indent(4) }}
outcome = {{ sm_name }}.execute()
{% endblock execute %}
{% block base_footer %}
{{ base_footer | indent(4) }}
{% endblock base_footer %}
{% block main %}
if __name__ == '__main__':
{{ '' | indent(4, true) }}{% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}()
{% endblock main %}
| ReconCell/smacha | smacha/src/smacha/templates/Base.tpl.py | Python | bsd-3-clause | 2,545 | 0.070334 |
Python 3.4.2 (default, Oct 19 2014, 13:31:11)
[GCC 4.9.1] on linux
Type "copyright", "credits" or "license()" for more information.
>>> print("hello Joe")
hello Joe
>>>
| davisjoe/joesrobotchallenge | expermients/hello Joe.py | Python | mit | 171 | 0.035088 |
__author__ = 'SmileyBarry'
from .core import APIConnection, SteamObject, store
from .decorators import cached_property, INFINITE
class SteamApp(SteamObject):
def __init__(self, appid, name=None, owner=None):
self._id = appid
if name is not None:
import time
self._cache = dict()
self._cache['name'] = (name, time.time())
# Normally, the associated userid is also the owner.
# That would not be the case if the game is borrowed, though. In that case, the object creator
# usually defines attributes accordingly. However, at this time we can't ask the API "is this
# game borrowed?", unless it's the actively-played game, so this distinction isn't done in the
# object's context, but in the object creator's context.
self._owner = owner
self._userid = self._owner
@cached_property(ttl=INFINITE)
def _schema(self):
return APIConnection().call("ISteamUserStats", "GetSchemaForGame", "v2", appid=self._id)
@property
def appid(self):
return self._id
@cached_property(ttl=INFINITE)
def achievements(self):
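        # Build SteamAchievement objects by combining the global unlock percentages
        # with this user's per-achievement unlock state (when a user is associated).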
global_percentages = APIConnection().call("ISteamUserStats", "GetGlobalAchievementPercentagesForApp", "v0002",
gameid=self._id)
if self._userid is not None:
# Ah-ha, this game is associated to a user!
userid = self._userid
unlocks = APIConnection().call("ISteamUserStats",
"GetUserStatsForGame",
"v2",
appid=self._id,
steamid=userid)
if 'achievements' in unlocks.playerstats:
unlocks = [associated_achievement.name
for associated_achievement in unlocks.playerstats.achievements
if associated_achievement.achieved != 0]
else:
userid = None
unlocks = None
achievements_list = []
for achievement in self._schema.game.availableGameStats.achievements:
achievement_obj = SteamAchievement(self._id, achievement.name, achievement.displayName, userid)
achievement_obj._cache = {}
if achievement.hidden == 0:
store(achievement_obj, "is_hidden", False)
else:
store(achievement_obj, "is_hidden", True)
for global_achievement in global_percentages.achievementpercentages.achievements:
if global_achievement.name == achievement.name:
achievement_obj.unlock_percentage = global_achievement.percent
achievements_list += [achievement_obj]
if unlocks is not None:
for achievement in achievements_list:
if achievement.apiname in unlocks:
store(achievement, "is_achieved", True)
else:
store(achievement, "is_achieved", False)
return achievements_list
@cached_property(ttl=INFINITE)
def name(self):
return self._schema.game.gameName
@cached_property(ttl=INFINITE)
def owner(self):
if self._owner is None:
return self._userid
else:
return self._owner
def __str__(self):
return self.name
def __hash__(self):
# Don't just use the ID so ID collision between different types of objects wouldn't cause a match.
return hash(('app', self.id))
class SteamAchievement(SteamObject):
def __init__(self, linked_appid, apiname, displayname, linked_userid=None):
"""
        Initialise a new instance of SteamAchievement. You shouldn't create one
        yourself; get one from "SteamApp.achievements" instead.
:param linked_appid: The AppID associated with this achievement.
:type linked_appid: int
:param apiname: The API-based name of this achievement. Usually a string.
:type apiname: str or unicode
:param displayname: The achievement's user-facing name.
:type displayname: str or unicode
:param linked_userid: The user ID this achievement is linked to.
:type linked_userid: int
:return: A new SteamAchievement instance.
"""
self._appid = linked_appid
self._id = apiname
self._displayname = displayname
self._userid = linked_userid
self.unlock_percentage = 0.0
def __hash__(self):
# Don't just use the ID so ID collision between different types of objects wouldn't cause a match.
return hash((self.id, self._appid))
@property
def appid(self):
return self._appid
@property
def name(self):
return self._displayname
@property
def apiname(self):
return self._id
@cached_property(ttl=INFINITE)
def is_hidden(self):
response = APIConnection().call("ISteamUserStats",
"GetSchemaForGame",
"v2",
appid=self._appid)
for achievement in response.game.availableGameStats.achievements:
if achievement.name == self._id:
if achievement.hidden == 0:
return False
else:
return True
@cached_property(ttl=INFINITE)
def is_unlocked(self):
if self._userid is None:
raise ValueError("No Steam ID linked to this achievement!")
response = APIConnection().call("ISteamUserStats",
"GetPlayerAchievements",
"v1",
steamid=self._userid,
appid=self._appid,
l="English")
for achievement in response.playerstats.achievements:
if achievement.apiname == self._id:
if achievement.achieved == 1:
return True
else:
return False
# Cannot be found.
return False | balohmatevz/steamapi | steamapi/app.py | Python | mit | 6,268 | 0.002234 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField
from lms.djangoapps.courseware.fields import UnsignedBigIntAutoField
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='PersistentSubsectionGrade',
fields=[
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
('id', UnsignedBigIntAutoField(serialize=False, primary_key=True)),
('user_id', models.IntegerField()),
('course_id', CourseKeyField(max_length=255)),
('usage_key', UsageKeyField(max_length=255)),
('subtree_edited_date', models.DateTimeField(verbose_name=b'last content edit timestamp')),
('course_version', models.CharField(max_length=255, verbose_name=b'guid of latest course version', blank=True)),
('earned_all', models.FloatField()),
('possible_all', models.FloatField()),
('earned_graded', models.FloatField()),
('possible_graded', models.FloatField()),
],
),
migrations.CreateModel(
name='VisibleBlocks',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('blocks_json', models.TextField()),
('hashed', models.CharField(unique=True, max_length=100)),
],
),
migrations.AddField(
model_name='persistentsubsectiongrade',
name='visible_blocks',
field=models.ForeignKey(to='grades.VisibleBlocks', db_column=b'visible_blocks_hash', to_field=b'hashed', on_delete=models.CASCADE),
),
migrations.AlterUniqueTogether(
name='persistentsubsectiongrade',
unique_together=set([('course_id', 'user_id', 'usage_key')]),
),
]
| ESOedX/edx-platform | lms/djangoapps/grades/migrations/0001_initial.py | Python | agpl-3.0 | 2,347 | 0.002983 |
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
from lettuce import step, world
from common import *
############### ACTIONS ####################
@step('There are no courses$')
def no_courses(step):
world.clear_courses()
create_studio_user()
@step('I click the New Course button$')
def i_click_new_course(step):
world.css_click('.new-course-button')
@step('I fill in the new course information$')
def i_fill_in_a_new_course_information(step):
fill_in_course_info()
@step('I create a course with "([^"]*)", "([^"]*)", "([^"]*)", and "([^"]*)"')
def i_create_course(step, name, org, number, run):
fill_in_course_info(name=name, org=org, num=number, run=run)
@step('I create a new course$')
def i_create_a_course(step):
create_a_course()
@step('I click the course link in Studio Home$')
def i_click_the_course_link_in_studio_home(step): # pylint: disable=invalid-name
course_css = 'a.course-link'
world.css_click(course_css)
@step('I see an error about the length of the org/course/run tuple')
def i_see_error_about_length(step):
assert world.css_has_text(
'#course_creation_error',
'The combined length of the organization, course number, '
'and course run fields cannot be more than 65 characters.'
)
############ ASSERTIONS ###################
@step('the Courseware page has loaded in Studio$')
def courseware_page_has_loaded_in_studio(step):
course_title_css = 'span.course-title'
assert world.is_css_present(course_title_css)
@step('I see the course listed in Studio Home$')
def i_see_the_course_in_studio_home(step):
course_css = 'h3.class-title'
assert world.css_has_text(course_css, world.scenario_dict['COURSE'].display_name)
@step('I am on the "([^"]*)" tab$')
def i_am_on_tab(step, tab_name):
header_css = 'div.inner-wrapper h1'
assert world.css_has_text(header_css, tab_name)
@step('I see a link for adding a new section$')
def i_see_new_section_link(step):
link_css = '.outline .button-new'
assert world.css_has_text(link_css, 'New Section')
| Stanford-Online/edx-platform | cms/djangoapps/contentstore/features/courses.py | Python | agpl-3.0 | 2,137 | 0.001872 |
"""
This is an example showing how to use the mgd2d solver.
A 4th order accurate solution is obtained with the 5pt stencil,
by using deferred correction.
"""
import numpy as np
import time
from mgd2d import FMG,V_cycle
#analytical solution
def Uann(x,y,n):
return np.sin(2*n*np.pi*x)*np.sin(2*n*np.pi*y)
#RHS corresponding to above
def source(x,y,n):
return -8 * (np.pi)**2 * n**2 * np.sin(2*n*np.pi*x) * np.sin(2*n*np.pi*y)
#input
#FMG is a direct solver. tolerance and iterations are not used
#nv = 1 # nv : Number of V-cycles within FMG. nv=1 will give solution a to within discretization error.
# Increase this to get a higher accuracy solution (upto roundoff limit)
# of the discrete problem.(residual on the fine grid =round off limit)
# Here I am using nv=2 for the first solve and nv=6 for the second solve.
nlevels = 7 #total number of grid levels. 1 means no multigrid, 2 means one coarse grid. etc
# Number of points is based on the number of multigrid levels as
# N=A*2**(num_levels-1) where A is an integer >=4. Smaller A is better
# This is a cell centered discretization
NX = 4*2**(nlevels-1)
NY = 4*2**(nlevels-1)
#the grid has one layer of ghost cells to help apply the boundary conditions
uann=np.zeros([NX+2,NY+2])#analytical solution
u =np.zeros([NX+2,NY+2])#approximation
f =np.zeros([NX+2,NY+2])#RHS
#for deferred correction
uxx = np.zeros_like(u)
corr = np.zeros_like(u)
#calcualte the RHS and exact solution
DX=1.0/NX
DY=1.0/NY
n=1 # number of waves in the solution
xc=np.linspace(0.5*DX,1-0.5*DX,NX)
yc=np.linspace(0.5*DY,1-0.5*DY,NY)
XX,YY=np.meshgrid(xc,yc,indexing='ij')
uann[1:NX+1,1:NY+1]=Uann(XX,YY,n)
f[1:NX+1,1:NY+1]=source(XX,YY,n)
print('mgd2d.py : Two Dimensional geometric multigrid solver')
print('NX:',NX,', NY:',NY,', levels: ',nlevels)
#start solving
tb=time.time()
u,res=FMG(NX,NY,nlevels,f,2)
error=np.abs(uann[1:NX+1,1:NY+1]-u[1:NX+1,1:NY+1])
print(' 2nd Order::L_inf (true error): ',np.max(np.max(error)))
print(' Elapsed time: ',time.time()-tb,' seconds')
print('Improving approximation using deferred correction')
#deferred correction
#refer Leveque, p63
Ax=1.0/DX**2
Ay=1.0/DY**2
for i in range(1,NX+1):
for j in range(1,NY+1):
uxx[i,j]=(u[i+1,j]+u[i-1,j] - 2*u[i,j])/DX**2
# we should be using one-sided difference formulae for values
# near the boundary. For simplicity I am just applying the
# condition known from the analytical form for these terms.
uxx[ 0,:] = -uxx[ 1,:]
uxx[-1,:] = -uxx[-2,:]
uxx[:, 0] = -uxx[:, 1]
uxx[:,-1] = -uxx[:,-2]
f[ 0,:] = -f[ 1,:]
f[-1,:] = -f[-2,:]
f[:, 0] = -f[:, 1]
f[:,-1] = -f[:,-2]
#correction term
# del2(f)-2*uxxyy
for i in range(1,NX+1):
for j in range(1,NY+1):
corr[i,j]=(Ax*(f[i+1,j]+f[i-1,j])+Ay*(f[i,j+1]+f[i,j-1])-2.0*(Ax+Ay)*f[i,j])-2*(uxx[i,j+1]+uxx[i,j-1] - 2*uxx[i,j])/DY**2
#adjust the RHS to cancel the leading order terms
for i in range(1,NX+1):
for j in range(1,NY+1):
f[i,j]+= 1.0/12*DX**2*(corr[i,j])
##solve once again with the new RHS
u,res=FMG(NX,NY,nlevels,f,5)
tf=time.time()
error=np.abs(uann[1:NX+1,1:NY+1]-u[1:NX+1,1:NY+1])
print(' 4th Order::L_inf (true error): ',np.max(np.max(error)))
print('Elapsed time: ',tf-tb,' seconds')
| AbhilashReddyM/GeometricMultigrid | example_FMG_defcor.py | Python | mit | 3,299 | 0.049712 |
import os
import logging
import time
import ConfigParser
from fabric.operations import get
from fabric.api import sudo, local, lcd, cd, shell_env
from aws_collector.config.config import MAIN_CFG, S3_BUCKET
OUTPUT_FILE_FMT = '%s-%s-collect-output.tar'
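# Placeholders below: local file to upload, target S3 bucket, destination key.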
S3_UPLOAD_CMD = 'aws s3 cp --region us-east-1 %s s3://%s/%s'
def collect(conf, performance_results, output, version, instance):
"""
Copy the files from the remote EC2 instance to the local file system for
later analysis.
:param performance_results: The expression (/tmp/*.cpu) that output files
of the performance test will match, and the ones
we need to copy to our host.
:param output: The local directory where we'll copy the remote files
"""
version = version.replace('/', '-')
output_file = OUTPUT_FILE_FMT % (int(time.time()), version)
logging.info('Output statistics:')
sudo('ls -lah %s' % performance_results)
sudo('du -sh %s' % performance_results)
logging.info('Compressing output...')
# performance_results looks like /tmp/collector/w3af-*
path, file_glob = os.path.split(performance_results)
with cd(path):
sudo('tar -cpvf /tmp/%s %s' % (output_file, file_glob))
# Append config information to tar
sudo('tar -C /tmp/ -rpvf /tmp/%s config' % output_file)
# Compress tar file
sudo('bzip2 -9 /tmp/%s' % output_file)
output_file = '%s.bz2' % output_file
remote_path = '/tmp/%s' % output_file
sudo('ls -lah %s' % remote_path)
# Uploading to S3
try:
target_bucket = conf.get(MAIN_CFG, S3_BUCKET)
except KeyError:
pass
else:
aws_access, aws_secret = get_aws_credentials()
if aws_access and aws_secret:
logging.debug('Uploading data to S3...')
s3_upload = S3_UPLOAD_CMD % (remote_path,
target_bucket,
output_file)
# Needed to upload
sudo('sudo pip install --upgrade awscli')
with cd('/tmp/'):
with shell_env(AWS_ACCESS_KEY_ID=aws_access,
AWS_SECRET_ACCESS_KEY=aws_secret):
sudo(s3_upload)
else:
logging.info('Failed to upload data to S3: No AWS credentials'
' were configured in AWS_ACCESS_KEY AWS_SECRET_KEY')
# Downloading to my workstation
logging.info('Downloading performance information, might take a while...')
# Create the output directory if it doesn't exist
output = os.path.expanduser(output)
local_path = os.path.join(output, version)
#
# Before I stored the output in ~/performance_info/<version>/<instance-id>
# but that did not help with the analysis phase, since I had to remember
# those "long" EC2 instance IDs and... it had nothing to do with the
# analysis itself.
#
# Now I just use ~/performance_info/<version>/<unique-incremental-id>
# where unique-incremental-id is just a number that starts from 0 and
# increments
#
i = -1
while True:
i += 1
potential_output_path = os.path.join(local_path, '%s' % i)
if not os.path.exists(potential_output_path):
os.makedirs(potential_output_path)
local_path = potential_output_path
break
# Get the remote file with all the data
local_file_path = os.path.join(local_path, output_file)
get(remote_path=remote_path, local_path=local_file_path)
logging.debug('Decompress downloaded data...')
with lcd(local_path):
local('tar -jxpvf %s' % output_file)
os.unlink(local_file_path)
def get_aws_credentials():
"""
:return: AWS_ACCESS_KEY AWS_SECRET_KEY from environment variables or ~/.boto
"""
if os.environ.get('AWS_ACCESS_KEY') and os.environ.get('AWS_SECRET_KEY'):
return os.environ.get('AWS_ACCESS_KEY'), os.environ.get('AWS_SECRET_KEY')
elif os.path.exists(os.path.expanduser('~/.boto')):
config = ConfigParser.ConfigParser()
config.read(os.path.expanduser('~/.boto'))
aws_access = config.get('Credentials', 'aws_access_key_id', None)
aws_secret = config.get('Credentials', 'aws_secret_access_key', None)
return aws_access, aws_secret
return None, None | andresriancho/collector | aws_collector/utils/collect.py | Python | gpl-2.0 | 4,386 | 0.000912 |
# -*- coding: utf-8 -*-
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import res_partner
| rosenvladimirov/addons | partner_vat_search/models/__init__.py | Python | agpl-3.0 | 120 | 0 |
"""Smoke tests for the ``CLI`` end-to-end scenario.
:Requirement: Cli Endtoend
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: Hammer
:Assignee: gtalreja
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from fauxfactory import gen_alphanumeric
from fauxfactory import gen_ipaddr
from robottelo import manifests
from robottelo import ssh
from robottelo.cli.activationkey import ActivationKey
from robottelo.cli.computeresource import ComputeResource
from robottelo.cli.contentview import ContentView
from robottelo.cli.domain import Domain
from robottelo.cli.factory import make_user
from robottelo.cli.host import Host
from robottelo.cli.hostgroup import HostGroup
from robottelo.cli.lifecycleenvironment import LifecycleEnvironment
from robottelo.cli.location import Location
from robottelo.cli.org import Org
from robottelo.cli.product import Product
from robottelo.cli.repository import Repository
from robottelo.cli.repository_set import RepositorySet
from robottelo.cli.subnet import Subnet
from robottelo.cli.subscription import Subscription
from robottelo.cli.user import User
from robottelo.config import setting_is_set
from robottelo.config import settings
from robottelo.constants import DEFAULT_LOC
from robottelo.constants import DEFAULT_ORG
from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME
from robottelo.constants import PRDS
from robottelo.constants import REPOS
from robottelo.constants import REPOSET
from robottelo.constants.repos import CUSTOM_RPM_REPO
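# Repository content label that gets overridden on the activation key (step 2.13.1).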
AK_CONTENT_LABEL = 'rhel-6-server-rhev-agent-rpms'
@pytest.fixture(scope='module')
def fake_manifest_is_set():
return setting_is_set('fake_manifest')
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_org():
"""Check if 'Default Organization' is present
:id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a
:expectedresults: 'Default Organization' is found
"""
result = Org.info({'name': DEFAULT_ORG})
assert result['name'] == DEFAULT_ORG
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_default_loc():
"""Check if 'Default Location' is present
:id: 11cf0d06-78ff-47e8-9d50-407a2ea31988
:expectedresults: 'Default Location' is found
"""
result = Location.info({'name': DEFAULT_LOC})
assert result['name'] == DEFAULT_LOC
@pytest.mark.tier1
@pytest.mark.upgrade
def test_positive_cli_find_admin_user():
"""Check if Admin User is present
:id: f6755189-05a6-4d2f-a3b8-98be0cfacaee
:expectedresults: Admin User is found and has Admin role
"""
result = User.info({'login': 'admin'})
assert result['login'] == 'admin'
assert result['admin'] == 'yes'
@pytest.mark.skip_if_not_set('libvirt')
@pytest.mark.tier4
@pytest.mark.upgrade
@pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url')
def test_positive_cli_end_to_end(fake_manifest_is_set, rhel6_contenthost, default_sat):
"""Perform end to end smoke tests using RH and custom repos.
1. Create a new user with admin permissions
2. Using the new user from above
1. Create a new organization
2. Clone and upload manifest
3. Create a new lifecycle environment
4. Create a custom product
5. Create a custom YUM repository
6. Enable a Red Hat repository
7. Synchronize the three repositories
8. Create a new content view
9. Associate the YUM and Red Hat repositories to new content view
10. Publish content view
11. Promote content view to the lifecycle environment
12. Create a new activation key
13. Add the products to the activation key
14. Create a new libvirt compute resource
15. Create a new subnet
16. Create a new domain
17. Create a new hostgroup and associate previous entities to it
18. Provision a client ** NOT CURRENTLY PROVISIONING
:id: 8c8b3ffa-0d54-436b-8eeb-1a3542e100a8
:expectedresults: All tests should succeed and Content should be
successfully fetched by client.
"""
# step 1: Create a new user with admin permissions
password = gen_alphanumeric()
user = make_user({'admin': 'true', 'password': password})
user['password'] = password
# step 2.1: Create a new organization
org = _create(user, Org, {'name': gen_alphanumeric()})
# step 2.2: Clone and upload manifest
if fake_manifest_is_set:
with manifests.clone() as manifest:
ssh.upload_file(manifest.content, manifest.filename)
Subscription.upload({'file': manifest.filename, 'organization-id': org['id']})
# step 2.3: Create a new lifecycle environment
lifecycle_environment = _create(
user,
LifecycleEnvironment,
{'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'},
)
# step 2.4: Create a custom product
product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']})
repositories = []
# step 2.5: Create custom YUM repository
yum_repo = _create(
user,
Repository,
{
'content-type': 'yum',
'name': gen_alphanumeric(),
'product-id': product['id'],
'publish-via-http': 'true',
'url': CUSTOM_RPM_REPO,
},
)
repositories.append(yum_repo)
# step 2.6: Enable a Red Hat repository
if fake_manifest_is_set:
RepositorySet.enable(
{
'basearch': 'x86_64',
'name': REPOSET['rhva6'],
'organization-id': org['id'],
'product': PRDS['rhel'],
'releasever': '6Server',
}
)
rhel_repo = Repository.info(
{
'name': REPOS['rhva6']['name'],
'organization-id': org['id'],
'product': PRDS['rhel'],
}
)
repositories.append(rhel_repo)
# step 2.7: Synchronize the three repositories
for repo in repositories:
Repository.with_user(user['login'], user['password']).synchronize({'id': repo['id']})
# step 2.8: Create content view
content_view = _create(
user, ContentView, {'name': gen_alphanumeric(), 'organization-id': org['id']}
)
# step 2.9: Associate the YUM and Red Hat repositories to new content view
for repo in repositories:
ContentView.add_repository(
{
'id': content_view['id'],
'organization-id': org['id'],
'repository-id': repo['id'],
}
)
# step 2.10: Publish content view
ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']})
# step 2.11: Promote content view to the lifecycle environment
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 1
ContentView.with_user(user['login'], user['password']).version_promote(
{'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']}
)
# check that content view exists in lifecycle
content_view = ContentView.with_user(user['login'], user['password']).info(
{'id': content_view['id']}
)
assert len(content_view['versions']) == 1
cv_version = ContentView.with_user(user['login'], user['password']).version_info(
{'id': content_view['versions'][0]['id']}
)
assert len(cv_version['lifecycle-environments']) == 2
assert cv_version['lifecycle-environments'][-1]['id'] == lifecycle_environment['id']
# step 2.12: Create a new activation key
activation_key = _create(
user,
ActivationKey,
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': gen_alphanumeric(),
'organization-id': org['id'],
},
)
# step 2.13: Add the products to the activation key
subscription_list = Subscription.with_user(user['login'], user['password']).list(
{'organization-id': org['id']}, per_page=False
)
for subscription in subscription_list:
if subscription['name'] == DEFAULT_SUBSCRIPTION_NAME:
ActivationKey.with_user(user['login'], user['password']).add_subscription(
{
'id': activation_key['id'],
'quantity': 1,
'subscription-id': subscription['id'],
}
)
# step 2.13.1: Enable product content
if fake_manifest_is_set:
ActivationKey.with_user(user['login'], user['password']).content_override(
{
'content-label': AK_CONTENT_LABEL,
'id': activation_key['id'],
'organization-id': org['id'],
'value': '1',
}
)
# BONUS: Create a content host and associate it with promoted
# content view and last lifecycle where it exists
content_host_name = gen_alphanumeric()
content_host = Host.with_user(user['login'], user['password']).subscription_register(
{
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
'name': content_host_name,
'organization-id': org['id'],
}
)
content_host = Host.with_user(user['login'], user['password']).info({'id': content_host['id']})
# check that content view matches what we passed
assert content_host['content-information']['content-view']['name'] == content_view['name']
# check that lifecycle environment matches
assert (
content_host['content-information']['lifecycle-environment']['name']
== lifecycle_environment['name']
)
# step 2.14: Create a new libvirt compute resource
_create(
user,
ComputeResource,
{
'name': gen_alphanumeric(),
'provider': 'Libvirt',
'url': f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system',
},
)
# step 2.15: Create a new subnet
subnet = _create(
user,
Subnet,
{
'name': gen_alphanumeric(),
'network': gen_ipaddr(ip3=True),
'mask': '255.255.255.0',
},
)
# step 2.16: Create a new domain
domain = _create(user, Domain, {'name': gen_alphanumeric()})
# step 2.17: Create a new hostgroup and associate previous entities to it
host_group = _create(
user,
HostGroup,
{'domain-id': domain['id'], 'name': gen_alphanumeric(), 'subnet-id': subnet['id']},
)
HostGroup.with_user(user['login'], user['password']).update(
{
'id': host_group['id'],
'organization-ids': org['id'],
'content-view-id': content_view['id'],
'lifecycle-environment-id': lifecycle_environment['id'],
}
)
# step 2.18: Provision a client
# TODO this isn't provisioning through satellite as intended
    # Note it wasn't working well even before the change that added this todo
rhel6_contenthost.install_katello_ca(default_sat)
    # Register the client with the foreman server using activation keys
rhel6_contenthost.register_contenthost(org['label'], activation_key['name'])
assert rhel6_contenthost.subscribed
# Install rpm on client
package_name = 'python-kitchen'
result = rhel6_contenthost.execute(f'yum install -y {package_name}')
assert result.status == 0
# Verify that the package is installed by querying it
result = rhel6_contenthost.run(f'rpm -q {package_name}')
assert result.status == 0
def _create(user, entity, attrs):
"""Creates a Foreman entity and returns it.
:param dict user: A python dictionary representing a User
:param object entity: A valid CLI entity.
:param dict attrs: A python dictionary with attributes to use when
creating entity.
:return: A ``dict`` representing the Foreman entity.
:rtype: dict
"""
# Create new entity as new user
return entity.with_user(user['login'], user['password']).create(attrs)
| lpramuk/robottelo | tests/foreman/endtoend/test_cli_endtoend.py | Python | gpl-3.0 | 12,481 | 0.001683 |
import logging
import commands
import random
from autotest.client.shared import error
from autotest.client import utils
from virttest import utils_misc
from virttest import utils_test
from virttest import utils_net
@error.context_aware
def run(test, params, env):
"""
Test the RX jumbo frame function of vnics:
1) Boot the VM.
2) Change the MTU of guest nics and host taps depending on the NIC model.
3) Add the static ARP entry for guest NIC.
4) Wait for the MTU ok.
5) Verify the path MTU using ping.
6) Ping the guest with large frames.
7) Increment size ping.
8) Flood ping the guest with large frames.
9) Verify the path MTU.
10) Recover the MTU.
:param test: QEMU test object.
:param params: Dictionary with the test parameters.
:param env: Dictionary with test environment.
"""
timeout = int(params.get("login_timeout", 360))
mtu = params.get("mtu", "1500")
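    # 28 = 20-byte IP header + 8-byte ICMP header, so def_max_icmp_size below is
    # the largest ICMP payload that fits in one MTU-sized frame unfragmented.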
def_max_icmp_size = int(mtu) - 28
max_icmp_pkt_size = int(params.get("max_icmp_pkt_size",
def_max_icmp_size))
flood_time = params.get("flood_time", "300")
os_type = params.get("os_type")
os_variant = params.get("os_variant")
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
session = vm.wait_for_login(timeout=timeout)
session_serial = vm.wait_for_serial_login(timeout=timeout)
ifname = vm.get_ifname(0)
guest_ip = vm.get_address(0)
if guest_ip is None:
raise error.TestError("Could not get the guest ip address")
try:
error.context("Changing the MTU of guest", logging.info)
# Environment preparation
mac = vm.get_mac_address(0)
if os_type == "linux":
ethname = utils_net.get_linux_ifname(session, mac)
guest_mtu_cmd = "ifconfig %s mtu %s" % (ethname, mtu)
else:
connection_id = utils_net.get_windows_nic_attribute(session,
"macaddress", mac, "netconnectionid")
index = utils_net.get_windows_nic_attribute(session,
"netconnectionid", connection_id, "index")
if os_variant == "winxp":
pnpdevice_id = utils_net.get_windows_nic_attribute(session,
"netconnectionid", connection_id, "pnpdeviceid")
cd_num = utils_misc.get_winutils_vol(session)
copy_cmd = r"xcopy %s:\devcon\wxp_x86\devcon.exe c:\ " % cd_num
session.cmd(copy_cmd)
reg_set_mtu_pattern = params.get("reg_mtu_cmd")
mtu_key_word = params.get("mtu_key", "MTU")
reg_set_mtu = reg_set_mtu_pattern % (int(index), mtu_key_word,
int(mtu))
guest_mtu_cmd = "%s " % reg_set_mtu
session.cmd(guest_mtu_cmd)
if os_type == "windows":
mode = "netsh"
if os_variant == "winxp":
connection_id = pnpdevice_id.split("&")[-1]
mode = "devcon"
utils_net.restart_windows_guest_network(session_serial,
connection_id,
mode=mode)
error.context("Chaning the MTU of host tap ...", logging.info)
host_mtu_cmd = "ifconfig %s mtu %s"
# Before change macvtap mtu, must set the base interface mtu
if params.get("nettype") == "macvtap":
base_if = utils_net.get_macvtap_base_iface(params.get("netdst"))
utils.run(host_mtu_cmd % (base_if, mtu))
utils.run(host_mtu_cmd % (ifname, mtu))
error.context("Add a temporary static ARP entry ...", logging.info)
arp_add_cmd = "arp -s %s %s -i %s" % (guest_ip, mac, ifname)
utils.run(arp_add_cmd)
def is_mtu_ok():
status, _ = utils_test.ping(guest_ip, 1, interface=ifname,
packetsize=max_icmp_pkt_size,
hint="do", timeout=2)
return status == 0
def verify_mtu():
logging.info("Verify the path MTU")
status, output = utils_test.ping(guest_ip, 10, interface=ifname,
packetsize=max_icmp_pkt_size,
hint="do", timeout=15)
if status != 0:
logging.error(output)
raise error.TestFail("Path MTU is not as expected")
if utils_test.get_loss_ratio(output) != 0:
logging.error(output)
raise error.TestFail("Packet loss ratio during MTU "
"verification is not zero")
def flood_ping():
logging.info("Flood with large frames")
utils_test.ping(guest_ip, interface=ifname,
packetsize=max_icmp_pkt_size,
flood=True, timeout=float(flood_time))
def large_frame_ping(count=100):
logging.info("Large frame ping")
_, output = utils_test.ping(guest_ip, count, interface=ifname,
packetsize=max_icmp_pkt_size,
timeout=float(count) * 2)
ratio = utils_test.get_loss_ratio(output)
if ratio != 0:
raise error.TestFail("Loss ratio of large frame ping is %s" %
ratio)
def size_increase_ping(step=random.randrange(90, 110)):
logging.info("Size increase ping")
for size in range(0, max_icmp_pkt_size + 1, step):
logging.info("Ping %s with size %s", guest_ip, size)
status, output = utils_test.ping(guest_ip, 1, interface=ifname,
packetsize=size,
hint="do", timeout=1)
if status != 0:
status, output = utils_test.ping(guest_ip, 10,
interface=ifname,
packetsize=size,
adaptive=True,
hint="do",
timeout=20)
fail_ratio = int(params.get("fail_ratio", 50))
if utils_test.get_loss_ratio(output) > fail_ratio:
raise error.TestFail("Ping loss ratio is greater "
"than 50% for size %s" % size)
logging.info("Waiting for the MTU to be OK")
wait_mtu_ok = 10
if not utils_misc.wait_for(is_mtu_ok, wait_mtu_ok, 0, 1):
logging.debug(commands.getoutput("ifconfig -a"))
raise error.TestError("MTU is not as expected even after %s "
"seconds" % wait_mtu_ok)
# Functional Test
error.context("Checking whether MTU change is ok", logging.info)
verify_mtu()
large_frame_ping()
size_increase_ping()
# Stress test
flood_ping()
verify_mtu()
finally:
# Environment clean
if session:
session.close()
if utils.system("grep '%s.*%s' /proc/net/arp" % (guest_ip, ifname)) == '0':
utils.run("arp -d %s -i %s" % (guest_ip, ifname))
logging.info("Removing the temporary ARP entry successfully")
| tolimit/tp-qemu | generic/tests/jumbo.py | Python | gpl-2.0 | 7,693 | 0.00052 |
#-------------------------------------------------------------------------------
# Copyright (c) 2011 Anton Golubkov.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Lesser Public License v2.1
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
#
# Contributors:
# Anton Golubkov - initial API and implementation
#-------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.split
class TestSplitBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.split.Split()
self.test_image = cv.LoadImage("files/test.png")
self.block.input_ports["input_image"].pass_value(self.test_image)
def test_output_image_channels(self):
""" Test return to output ports 3 one-channel images of same size
"""
self.block.process()
image_1 = self.block.output_ports["output_image_1"].get_value()
image_2 = self.block.output_ports["output_image_2"].get_value()
image_3 = self.block.output_ports["output_image_3"].get_value()
self.assertEqual(image_1.nChannels, 1)
self.assertEqual(image_2.nChannels, 1)
self.assertEqual(image_3.nChannels, 1)
def test_output_image_size(self):
""" Test return to output ports images of same size
"""
self.block.process()
image_1 = self.block.output_ports["output_image_1"].get_value()
image_2 = self.block.output_ports["output_image_2"].get_value()
image_3 = self.block.output_ports["output_image_3"].get_value()
self.assertEqual((image_1.width, image_1.height),
(self.test_image.width, self.test_image.height))
self.assertEqual((image_2.width, image_2.height),
(self.test_image.width, self.test_image.height))
self.assertEqual((image_3.width, image_3.height),
(self.test_image.width, self.test_image.height))
def test_output_image(self):
self.block.process()
output_image_1 = self.block.output_ports["output_image_1"].get_value()
output_image_2 = self.block.output_ports["output_image_2"].get_value()
output_image_3 = self.block.output_ports["output_image_3"].get_value()
cv.SaveImage("files/test_split_out_1.png", output_image_1)
cv.SaveImage("files/test_split_out_2.png", output_image_2)
cv.SaveImage("files/test_split_out_3.png", output_image_3)
loaded_image = cv.LoadImage("files/test_split_out_1.png")
test_loaded_image = cv.LoadImage("files/test_split_1.png")
self.assertEqual(loaded_image.tostring(), test_loaded_image.tostring())
loaded_image = cv.LoadImage("files/test_split_out_2.png")
test_loaded_image = cv.LoadImage("files/test_split_2.png")
self.assertEqual(loaded_image.tostring(), test_loaded_image.tostring())
loaded_image = cv.LoadImage("files/test_split_out_3.png")
test_loaded_image = cv.LoadImage("files/test_split_3.png")
self.assertEqual(loaded_image.tostring(), test_loaded_image.tostring())
def test_zero_image(self):
zero_image = cv.CreateImage( (0, 0), cv.IPL_DEPTH_8U, 3)
self.block.input_ports["input_image"].pass_value(zero_image)
self.block.process()
zero_image_1c = cv.CreateImage( (0, 0), cv.IPL_DEPTH_8U, 1)
output_image_1 = self.block.output_ports["output_image_1"].get_value()
output_image_2 = self.block.output_ports["output_image_2"].get_value()
output_image_3 = self.block.output_ports["output_image_3"].get_value()
self.assertEqual(output_image_1.tostring(), zero_image_1c.tostring())
self.assertEqual(output_image_2.tostring(), zero_image_1c.tostring())
self.assertEqual(output_image_3.tostring(), zero_image_1c.tostring())
if __name__ == '__main__':
unittest.main()
| anton-golubkov/Garland | src/test/test_splitblock.py | Python | lgpl-2.1 | 4,345 | 0.008055 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class EncryptionProtectorPaged(Paged):
"""
    A paging container for iterating over a list of EncryptionProtector objects
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[EncryptionProtector]'}
}
def __init__(self, *args, **kwargs):
super(EncryptionProtectorPaged, self).__init__(*args, **kwargs)
| SUSE/azure-sdk-for-python | azure-mgmt-sql/azure/mgmt/sql/models/encryption_protector_paged.py | Python | mit | 918 | 0 |
#!/usr/bin/env python
# Copyright 2008 Orbitz WorldWide
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# This module is an implementation of the Whisper database API
# Here is the basic layout of a whisper data file
#
# File = Header,Data
# Header = Metadata,ArchiveInfo+
# Metadata = lastUpdate,maxRetention,xFilesFactor,archiveCount
# ArchiveInfo = Offset,SecondsPerPoint,Points
# Data = Archive+
# Archive = Point+
# Point = timestamp,value
#
# NOTE: the lastUpdate field is deprecated, do not use it!
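#
# An illustrative usage sketch (hypothetical path and retentions; assumes this
# module is imported as ``whisper``):
#   whisper.create('/tmp/metric.wsp', [(60, 1440), (900, 2880)])  # 1-min points for a day,
#                                                                 # 15-min points for 30 days
#   whisper.update('/tmp/metric.wsp', 0.73)                       # store a value at "now"
#   (timeInfo, values) = whisper.fetch('/tmp/metric.wsp', time.time() - 3600)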
import os, struct, time
try:
import fcntl
CAN_LOCK = True
except ImportError:
CAN_LOCK = False
LOCK = False
CACHE_HEADERS = False
__headerCache = {}
longFormat = "!L"
longSize = struct.calcsize(longFormat)
floatFormat = "!f"
floatSize = struct.calcsize(floatFormat)
timestampFormat = "!L"
timestampSize = struct.calcsize(timestampFormat)
valueFormat = "!d"
valueSize = struct.calcsize(valueFormat)
pointFormat = "!Ld"
pointSize = struct.calcsize(pointFormat)
metadataFormat = "!2LfL"
metadataSize = struct.calcsize(metadataFormat)
archiveInfoFormat = "!3L"
archiveInfoSize = struct.calcsize(archiveInfoFormat)
debug = startBlock = endBlock = lambda *a,**k: None
class WhisperException(Exception):
"""Base class for whisper exceptions."""
class InvalidConfiguration(WhisperException):
"""Invalid configuration."""
class InvalidTimeInterval(WhisperException):
"""Invalid time interval."""
class TimestampNotCovered(WhisperException):
"""Timestamp not covered by any archives in this database."""
def enableDebug():
global open, debug, startBlock, endBlock
class open(file):
def __init__(self,*args,**kwargs):
file.__init__(self,*args,**kwargs)
self.writeCount = 0
self.readCount = 0
def write(self,data):
self.writeCount += 1
debug('WRITE %d bytes #%d' % (len(data),self.writeCount))
return file.write(self,data)
def read(self,bytes):
self.readCount += 1
debug('READ %d bytes #%d' % (bytes,self.readCount))
return file.read(self,bytes)
def debug(message):
print 'DEBUG :: %s' % message
__timingBlocks = {}
def startBlock(name):
__timingBlocks[name] = time.time()
def endBlock(name):
debug("%s took %.5f seconds" % (name,time.time() - __timingBlocks.pop(name)))
def __readHeader(fh):
info = __headerCache.get(fh.name)
if info: return info
#startBlock('__readHeader')
originalOffset = fh.tell()
fh.seek(0)
packedMetadata = fh.read(metadataSize)
(lastUpdate,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata)
archives = []
for i in xrange(archiveCount):
packedArchiveInfo = fh.read(archiveInfoSize)
(offset,secondsPerPoint,points) = struct.unpack(archiveInfoFormat,packedArchiveInfo)
archiveInfo = {
'offset' : offset,
'secondsPerPoint' : secondsPerPoint,
'points' : points,
'retention' : secondsPerPoint * points,
'size' : points * pointSize,
}
archives.append(archiveInfo)
fh.seek(originalOffset)
info = {
#'lastUpdate' : lastUpdate, # Deprecated
'maxRetention' : maxRetention,
'xFilesFactor' : xff,
'archives' : archives,
}
if CACHE_HEADERS:
__headerCache[fh.name] = info
#endBlock('__readHeader')
return info
def __changeLastUpdate(fh):
return #XXX Make this a NOP, use os.stat(filename).st_mtime instead
startBlock('__changeLastUpdate()')
originalOffset = fh.tell()
fh.seek(0) #Based on assumption that first field is lastUpdate
now = int( time.time() )
packedTime = struct.pack(timestampFormat,now)
fh.write(packedTime)
fh.seek(originalOffset)
endBlock('__changeLastUpdate()')
def create(path,archiveList,xFilesFactor=0.5):
"""create(path,archiveList,xFilesFactor=0.5)
path is a string
archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints)
xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur
"""
#Validate archive configurations...
if not archiveList:
raise InvalidConfiguration("You must specify at least one archive configuration!")
archiveList.sort(key=lambda a: a[0]) #sort by precision (secondsPerPoint)
for i,archive in enumerate(archiveList):
if i == len(archiveList) - 1: break
next = archiveList[i+1]
if not (archive[0] < next[0]):
raise InvalidConfiguration("You cannot configure two archives "
"with the same precision %s,%s" % (archive,next))
if (next[0] % archive[0]) != 0:
raise InvalidConfiguration("Higher precision archives' precision "
"must evenly divide all lower precision archives' precision %s,%s" \
% (archive[0],next[0]))
retention = archive[0] * archive[1]
nextRetention = next[0] * next[1]
if not (nextRetention > retention):
raise InvalidConfiguration("Lower precision archives must cover "
"larger time intervals than higher precision archives %s,%s" \
% (archive,next))
#Looks good, now we create the file and write the header
if os.path.exists(path):
raise InvalidConfiguration("File %s already exists!" % path)
fh = open(path,'wb')
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
lastUpdate = struct.pack( timestampFormat, int(time.time()) )
oldest = sorted([secondsPerPoint * points for secondsPerPoint,points in archiveList])[-1]
maxRetention = struct.pack( longFormat, oldest )
xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) )
archiveCount = struct.pack(longFormat, len(archiveList))
packedMetadata = lastUpdate + maxRetention + xFilesFactor + archiveCount
fh.write(packedMetadata)
headerSize = metadataSize + (archiveInfoSize * len(archiveList))
archiveOffsetPointer = headerSize
for secondsPerPoint,points in archiveList:
archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points)
fh.write(archiveInfo)
archiveOffsetPointer += (points * pointSize)
zeroes = '\x00' * (archiveOffsetPointer - headerSize)
fh.write(zeroes)
fh.close()
def __propagate(fh,timestamp,xff,higher,lower):
lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint'])
lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint']
fh.seek(higher['offset'])
packedPoint = fh.read(pointSize)
(higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint)
if higherBaseInterval == 0:
higherFirstOffset = higher['offset']
else:
timeDistance = lowerIntervalStart - higherBaseInterval
pointDistance = timeDistance / higher['secondsPerPoint']
byteDistance = pointDistance * pointSize
higherFirstOffset = higher['offset'] + (byteDistance % higher['size'])
higherPoints = lower['secondsPerPoint'] / higher['secondsPerPoint']
higherSize = higherPoints * pointSize
relativeFirstOffset = higherFirstOffset - higher['offset']
relativeLastOffset = (relativeFirstOffset + higherSize) % higher['size']
higherLastOffset = relativeLastOffset + higher['offset']
fh.seek(higherFirstOffset)
if higherFirstOffset < higherLastOffset: #we don't wrap the archive
seriesString = fh.read(higherLastOffset - higherFirstOffset)
else: #We do wrap the archive
higherEnd = higher['offset'] + higher['size']
seriesString = fh.read(higherEnd - higherFirstOffset)
fh.seek(higher['offset'])
seriesString += fh.read(higherLastOffset - higher['offset'])
#Now we unpack the series data we just read
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values
neighborValues = [None] * points
currentInterval = lowerIntervalStart
step = higher['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
neighborValues[i/2] = unpackedSeries[i+1]
currentInterval += step
  #Compute an aggregateValue from neighborValues and propagate it if we have enough known points
knownValues = [v for v in neighborValues if v is not None]
if not knownValues:
return False
knownPercent = float(len(knownValues)) / float(len(neighborValues))
if knownPercent >= xff: #we have enough data to propagate a value!
aggregateValue = float(sum(knownValues)) / float(len(knownValues)) #TODO another CF besides average?
myPackedPoint = struct.pack(pointFormat,lowerIntervalStart,aggregateValue)
fh.seek(lower['offset'])
packedPoint = fh.read(pointSize)
(lowerBaseInterval,lowerBaseValue) = struct.unpack(pointFormat,packedPoint)
if lowerBaseInterval == 0: #First propagated update to this lower archive
fh.seek(lower['offset'])
fh.write(myPackedPoint)
else: #Not our first propagated update to this lower archive
timeDistance = lowerIntervalStart - lowerBaseInterval
pointDistance = timeDistance / lower['secondsPerPoint']
byteDistance = pointDistance * pointSize
lowerOffset = lower['offset'] + (byteDistance % lower['size'])
fh.seek(lowerOffset)
fh.write(myPackedPoint)
return True
else:
return False
def update(path,value,timestamp=None):
"""update(path,value,timestamp=None)
path is a string
value is a float
timestamp is either an int or float
"""
value = float(value)
fh = open(path,'r+b')
return file_update(fh, value, timestamp)
def file_update(fh, value, timestamp):
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
if timestamp is None: timestamp = now
timestamp = int(timestamp)
diff = now - timestamp
if not ((diff < header['maxRetention']) and diff >= 0):
raise TimestampNotCovered("Timestamp not covered by any archives in "
"this database.")
for i,archive in enumerate(header['archives']): #Find the highest-precision archive that covers timestamp
if archive['retention'] < diff: continue
lowerArchives = header['archives'][i+1:] #We'll pass on the update to these lower precision archives later
break
#First we update the highest-precision archive
myInterval = timestamp - (timestamp % archive['secondsPerPoint'])
myPackedPoint = struct.pack(pointFormat,myInterval,value)
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0: #This file's first update
fh.seek(archive['offset'])
fh.write(myPackedPoint)
baseInterval,baseValue = myInterval,value
else: #Not our first update
timeDistance = myInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
fh.write(myPackedPoint)
#Now we propagate the update to lower-precision archives
#startBlock('update propagation')
higher = archive
for lower in lowerArchives:
if not __propagate(fh,myInterval,header['xFilesFactor'],higher,lower): break
higher = lower
#endBlock('update propagation')
__changeLastUpdate(fh)
fh.close()
def update_many(path,points):
"""update_many(path,points)
path is a string
points is a list of (timestamp,value) points
"""
if not points: return
points = [ (int(t),float(v)) for (t,v) in points]
points.sort(key=lambda p: p[0],reverse=True) #order points by timestamp, newest first
fh = open(path,'r+b')
return file_update_many(fh, points)
def file_update_many(fh, points):
if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX )
header = __readHeader(fh)
now = int( time.time() )
archives = iter( header['archives'] )
currentArchive = archives.next()
#debug(' update_many currentArchive=%s' % str(currentArchive))
currentPoints = []
for point in points:
age = now - point[0]
#debug(' update_many iterating points, point=%s age=%d' % (str(point),age))
while currentArchive['retention'] < age: #we can't fit any more points in this archive
#debug(' update_many this point is too old to fit here, currentPoints=%d' % len(currentPoints))
if currentPoints: #commit all the points we've found that it can fit
currentPoints.reverse() #put points in chronological order
__archive_update_many(fh,header,currentArchive,currentPoints)
currentPoints = []
try:
currentArchive = archives.next()
#debug(' update_many using next archive %s' % str(currentArchive))
except StopIteration:
#debug(' update_many no more archives!')
currentArchive = None
break
if not currentArchive: break #drop remaining points that don't fit in the database
#debug(' update_many adding point=%s' % str(point))
currentPoints.append(point)
#debug(' update_many done iterating points')
if currentArchive and currentPoints: #don't forget to commit after we've checked all the archives
currentPoints.reverse()
__archive_update_many(fh,header,currentArchive,currentPoints)
__changeLastUpdate(fh)
fh.close()
def __archive_update_many(fh,header,archive,points):
step = archive['secondsPerPoint']
#startBlock('__archive_update_many file=%s archive=%s points=%d' % (fh.name,step,len(points)))
alignedPoints = [ (timestamp - (timestamp % step), value)
for (timestamp,value) in points ]
#Create a packed string for each contiguous sequence of points
#startBlock('__archive_update_many string packing')
packedStrings = []
previousInterval = None
currentString = ""
for (interval,value) in alignedPoints:
#debug('__archive_update_many iterating alignedPoint at %s' % interval)
if (not previousInterval) or (interval == previousInterval + step):
#debug('__archive_update_many was expected, packing onto currentString')
currentString += struct.pack(pointFormat,interval,value)
previousInterval = interval
else:
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
#debug('__archive_update_many was NOT expected, appending to packedStrings startInterval=%s currentString=%d bytes' % (startInterval,len(currentString)))
packedStrings.append( (startInterval,currentString) )
currentString = struct.pack(pointFormat,interval,value)
previousInterval = interval
if currentString:
#startInterval = previousInterval - (step * len(currentString) / pointSize) + step
numberOfPoints = len(currentString) / pointSize
startInterval = previousInterval - (step * (numberOfPoints-1))
#debug('__archive_update_many done iterating alignedPoints, remainder currentString of %d bytes, startInterval=%s' % (len(currentString),startInterval))
packedStrings.append( (startInterval,currentString) )
#endBlock('__archive_update_many string packing')
#Read base point and determine where our writes will start
fh.seek(archive['offset'])
packedBasePoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedBasePoint)
if baseInterval == 0: #This file's first update
#debug('__archive_update_many first update')
baseInterval = packedStrings[0][0] #use our first string as the base, so we start at the start
#debug('__archive_update_many baseInterval is %s' % baseInterval)
#Write all of our packed strings in locations determined by the baseInterval
#startBlock('__archive_update_many write() operations')
for (interval,packedString) in packedStrings:
timeDistance = interval - baseInterval
pointDistance = timeDistance / step
byteDistance = pointDistance * pointSize
myOffset = archive['offset'] + (byteDistance % archive['size'])
fh.seek(myOffset)
archiveEnd = archive['offset'] + archive['size']
bytesBeyond = (myOffset + len(packedString)) - archiveEnd
#debug(' __archive_update_many myOffset=%d packedString=%d archiveEnd=%d bytesBeyond=%d' % (myOffset,len(packedString),archiveEnd,bytesBeyond))
if bytesBeyond > 0:
fh.write( packedString[:-bytesBeyond] )
#debug('We wrapped an archive!')
assert fh.tell() == archiveEnd, "archiveEnd=%d fh.tell=%d bytesBeyond=%d len(packedString)=%d" % (archiveEnd,fh.tell(),bytesBeyond,len(packedString))
fh.seek( archive['offset'] )
fh.write( packedString[-bytesBeyond:] ) #safe because it can't exceed the archive (retention checking logic above)
else:
fh.write(packedString)
#endBlock('__archive_update_many write() operations')
#Now we propagate the updates to lower-precision archives
#startBlock('__archive_update_many propagation')
higher = archive
lowerArchives = [arc for arc in header['archives'] if arc['secondsPerPoint'] > archive['secondsPerPoint']]
#debug('__archive_update_many I have %d lower archives' % len(lowerArchives))
for lower in lowerArchives:
fit = lambda i: i - (i % lower['secondsPerPoint'])
lowerIntervals = [fit(p[0]) for p in alignedPoints]
uniqueLowerIntervals = set(lowerIntervals)
#debug(' __archive_update_many points=%d unique=%d' % (len(alignedPoints),len(uniqueLowerIntervals)))
propagateFurther = False
for interval in uniqueLowerIntervals:
#debug(' __archive_update_many propagating from %d to %d, interval=%d' % (higher['secondsPerPoint'],lower['secondsPerPoint'],interval))
if __propagate(fh,interval,header['xFilesFactor'],higher,lower):
propagateFurther = True
#debug(' __archive_update_many Successful propagation!')
#debug(' __archive_update_many propagateFurther=%s' % propagateFurther)
if not propagateFurther: break
higher = lower
#endBlock('__archive_update_many propagation')
#endBlock('__archive_update_many file=%s archive=%s points=%d' % (fh.name,step,len(points)))
def info(path):
"""info(path)
path is a string
"""
fh = open(path,'rb')
info = __readHeader(fh)
fh.close()
return info
def fetch(path,fromTime,untilTime=None):
"""fetch(path,fromTime,untilTime=None)
path is a string
fromTime is an epoch time
untilTime is also an epoch time, but defaults to now
"""
fh = open(path,'rb')
return file_fetch(fh, fromTime, untilTime)
def file_fetch(fh, fromTime, untilTime):
header = __readHeader(fh)
now = int( time.time() )
if untilTime is None:
untilTime = now
fromTime = int(fromTime)
untilTime = int(untilTime)
oldestTime = now - header['maxRetention']
if fromTime < oldestTime:
fromTime = oldestTime
if not (fromTime < untilTime):
raise InvalidTimeInterval("Invalid time interval")
if untilTime > now:
untilTime = now
if untilTime < fromTime:
untilTime = now
diff = now - fromTime
for archive in header['archives']:
if archive['retention'] >= diff: break
fromInterval = int( fromTime - (fromTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
untilInterval = int( untilTime - (untilTime % archive['secondsPerPoint']) ) + archive['secondsPerPoint']
fh.seek(archive['offset'])
packedPoint = fh.read(pointSize)
(baseInterval,baseValue) = struct.unpack(pointFormat,packedPoint)
if baseInterval == 0:
step = archive['secondsPerPoint']
points = (untilInterval - fromInterval) / step
timeInfo = (fromInterval,untilInterval,step)
valueList = [None] * points
return (timeInfo,valueList)
#Determine fromOffset
timeDistance = fromInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
fromOffset = archive['offset'] + (byteDistance % archive['size'])
#Determine untilOffset
timeDistance = untilInterval - baseInterval
pointDistance = timeDistance / archive['secondsPerPoint']
byteDistance = pointDistance * pointSize
untilOffset = archive['offset'] + (byteDistance % archive['size'])
#Read all the points in the interval
fh.seek(fromOffset)
if fromOffset < untilOffset: #If we don't wrap around the archive
seriesString = fh.read(untilOffset - fromOffset)
else: #We do wrap around the archive, so we need two reads
archiveEnd = archive['offset'] + archive['size']
seriesString = fh.read(archiveEnd - fromOffset)
fh.seek(archive['offset'])
seriesString += fh.read(untilOffset - archive['offset'])
#Now we unpack the series data we just read (anything faster than unpack?)
byteOrder,pointTypes = pointFormat[0],pointFormat[1:]
points = len(seriesString) / pointSize
seriesFormat = byteOrder + (pointTypes * points)
unpackedSeries = struct.unpack(seriesFormat, seriesString)
#And finally we construct a list of values (optimize this!)
valueList = [None] * points #pre-allocate entire list for speed
currentInterval = fromInterval
step = archive['secondsPerPoint']
for i in xrange(0,len(unpackedSeries),2):
pointTime = unpackedSeries[i]
if pointTime == currentInterval:
pointValue = unpackedSeries[i+1]
valueList[i/2] = pointValue #in-place reassignment is faster than append()
currentInterval += step
fh.close()
timeInfo = (fromInterval,untilInterval,step)
return (timeInfo,valueList)
| eric/whisper-rb | lib/whisper/py/whisper.py | Python | mit | 21,690 | 0.026418 |
from __future__ import absolute_import
import datetime
import jwt
import re
import logging
from six.moves.urllib.parse import parse_qs, urlparse, urlsplit
from sentry.integrations.atlassian_connect import get_query_hash
from sentry.shared_integrations.exceptions import ApiError
from sentry.integrations.client import ApiClient
from sentry.utils.http import absolute_uri
logger = logging.getLogger("sentry.integrations.jira")
JIRA_KEY = "%s.jira" % (urlparse(absolute_uri()).hostname,)
ISSUE_KEY_RE = re.compile(r"^[A-Za-z][A-Za-z0-9]*-\d+$")
class JiraCloud(object):
"""
Contains the jira-cloud specifics that a JiraClient needs
in order to communicate with jira
"""
def __init__(self, shared_secret):
self.shared_secret = shared_secret
@property
def cache_prefix(self):
return "sentry-jira-2:"
def request_hook(self, method, path, data, params, **kwargs):
"""
Used by Jira Client to apply the jira-cloud authentication
"""
# handle params that are already part of the path
url_params = dict(parse_qs(urlsplit(path).query))
url_params.update(params or {})
path = path.split("?")[0]
jwt_payload = {
"iss": JIRA_KEY,
"iat": datetime.datetime.utcnow(),
"exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=5 * 60),
"qsh": get_query_hash(path, method.upper(), url_params),
}
encoded_jwt = jwt.encode(jwt_payload, self.shared_secret)
params = dict(jwt=encoded_jwt, **(url_params or {}))
request_spec = kwargs.copy()
request_spec.update(dict(method=method, path=path, data=data, params=params))
return request_spec
def user_id_field(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use accountId
"""
return "accountId"
def user_query_param(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use query
"""
return "query"
def user_id_get_param(self):
"""
Jira-Cloud requires GDPR compliant API usage so we have to use accountId
"""
return "accountId"
class JiraApiClient(ApiClient):
# TODO: Update to v3 endpoints
COMMENTS_URL = "/rest/api/2/issue/%s/comment"
COMMENT_URL = "/rest/api/2/issue/%s/comment/%s"
STATUS_URL = "/rest/api/2/status"
CREATE_URL = "/rest/api/2/issue"
ISSUE_URL = "/rest/api/2/issue/%s"
META_URL = "/rest/api/2/issue/createmeta"
PRIORITIES_URL = "/rest/api/2/priority"
PROJECT_URL = "/rest/api/2/project"
SEARCH_URL = "/rest/api/2/search/"
VERSIONS_URL = "/rest/api/2/project/%s/versions"
USERS_URL = "/rest/api/2/user/assignable/search"
USER_URL = "/rest/api/2/user"
SERVER_INFO_URL = "/rest/api/2/serverInfo"
ASSIGN_URL = "/rest/api/2/issue/%s/assignee"
TRANSITION_URL = "/rest/api/2/issue/%s/transitions"
EMAIL_URL = "/rest/api/3/user/email"
integration_name = "jira"
# This timeout is completely arbitrary. Jira doesn't give us any
# caching headers to work with. Ideally we want a duration that
# lets the user make their second jira issue with cached data.
cache_time = 240
def __init__(self, base_url, jira_style, verify_ssl, logging_context=None):
self.base_url = base_url
# `jira_style` encapsulates differences between jira server & jira cloud.
# We only support one API version for Jira, but server/cloud require different
# authentication mechanisms and caching.
self.jira_style = jira_style
super(JiraApiClient, self).__init__(verify_ssl, logging_context)
def get_cache_prefix(self):
return self.jira_style.cache_prefix
def request(self, method, path, data=None, params=None, **kwargs):
"""
Use the request_hook method for our specific style of Jira to
add authentication data and transform parameters.
"""
request_spec = self.jira_style.request_hook(method, path, data, params, **kwargs)
if "headers" not in request_spec:
request_spec["headers"] = {}
# Force adherence to the GDPR compliant API conventions.
# See
# https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide
request_spec["headers"]["x-atlassian-force-account-id"] = "true"
return self._request(**request_spec)
def user_id_get_param(self):
return self.jira_style.user_id_get_param()
def user_id_field(self):
return self.jira_style.user_id_field()
def user_query_param(self):
return self.jira_style.user_query_param()
def get_issue(self, issue_id):
return self.get(self.ISSUE_URL % (issue_id,))
def search_issues(self, query):
# check if it looks like an issue id
if ISSUE_KEY_RE.match(query):
jql = 'id="%s"' % query.replace('"', '\\"')
else:
jql = 'text ~ "%s"' % query.replace('"', '\\"')
return self.get(self.SEARCH_URL, params={"jql": jql})
def create_comment(self, issue_key, comment):
return self.post(self.COMMENTS_URL % issue_key, data={"body": comment})
def update_comment(self, issue_key, comment_id, comment):
return self.put(self.COMMENT_URL % (issue_key, comment_id), data={"body": comment})
def get_projects_list(self):
return self.get_cached(self.PROJECT_URL)
def get_project_key_for_id(self, project_id):
if not project_id:
return ""
projects = self.get_projects_list()
for project in projects:
if project["id"] == project_id:
return project["key"].encode("utf-8")
return ""
def get_create_meta_for_project(self, project):
params = {"expand": "projects.issuetypes.fields", "projectIds": project}
metas = self.get_cached(self.META_URL, params=params)
# We saw an empty JSON response come back from the API :(
if not metas:
logger.info(
"jira.get-create-meta.empty-response",
extra={"base_url": self.base_url, "project": project},
)
return None
# XXX(dcramer): document how this is possible, if it even is
if len(metas["projects"]) > 1:
raise ApiError(u"More than one project found matching {}.".format(project))
try:
return metas["projects"][0]
except IndexError:
logger.info(
"jira.get-create-meta.key-error",
extra={"base_url": self.base_url, "project": project},
)
return None
def get_versions(self, project):
return self.get_cached(self.VERSIONS_URL % project)
def get_priorities(self):
return self.get_cached(self.PRIORITIES_URL)
def get_users_for_project(self, project):
# Jira Server wants a project key, while cloud is indifferent.
project_key = self.get_project_key_for_id(project)
return self.get_cached(self.USERS_URL, params={"project": project_key})
def search_users_for_project(self, project, username):
# Jira Server wants a project key, while cloud is indifferent.
project_key = self.get_project_key_for_id(project)
return self.get_cached(
self.USERS_URL, params={"project": project_key, self.user_query_param(): username}
)
def search_users_for_issue(self, issue_key, email):
return self.get_cached(
self.USERS_URL, params={"issueKey": issue_key, self.user_query_param(): email}
)
def get_user(self, user_id):
user_id_get_param = self.user_id_get_param()
return self.get_cached(self.USER_URL, params={user_id_get_param: user_id})
def create_issue(self, raw_form_data):
data = {"fields": raw_form_data}
return self.post(self.CREATE_URL, data=data)
def get_server_info(self):
return self.get(self.SERVER_INFO_URL)
def get_valid_statuses(self):
return self.get_cached(self.STATUS_URL)
def get_transitions(self, issue_key):
return self.get_cached(self.TRANSITION_URL % issue_key)["transitions"]
def transition_issue(self, issue_key, transition_id):
return self.post(self.TRANSITION_URL % issue_key, {"transition": {"id": transition_id}})
def assign_issue(self, key, name_or_account_id):
user_id_field = self.user_id_field()
return self.put(self.ASSIGN_URL % key, data={user_id_field: name_or_account_id})
def get_email(self, account_id):
user = self.get_cached(self.EMAIL_URL, params={"accountId": account_id})
return user.get("email")
| beeftornado/sentry | src/sentry/integrations/jira/client.py | Python | bsd-3-clause | 8,782 | 0.001822 |
# -*- coding: utf-8 -*-
import json
import datetime
import PyRSS2Gen
from werkzeug.security import generate_password_hash
from mongoengine.errors import NotUniqueError, ValidationError
from flask import make_response
from tasks.email_tasks import send_email_task
from config import Config
from model.models import (User, Diary, Category, Page, Tag, Comment,
CommentEm, StaticPage)
from utils.helper import SiteHelpers
class UserDispatcher(object):
"""User dispatcher.
Return author profile
"""
def get_profile(self):
"""Return User object."""
return User.objects.first()
def generate_user(self, username, password):
"""Generate User"""
user = User(name=username)
user.password = generate_password_hash(password=password)
return user.save()
def delete_user(self):
"""Delete User"""
return User.objects().first().delete()
def get_by_name(self, username):
"""Get user by username
Args:
username: string
Return:
user: user object
"""
return User.objects(name=username).first()
class CommentDispatcher(object):
"""Comment dispatcher.
    Return comment helper functions.
"""
def add_comment(self, author, diary_id, email, content):
diary = Diary.objects(pk=diary_id)
diary_title = diary.first().title
comment_em = CommentEm(
author=author,
content=content,
email=email
)
diary.update_one(push__comments=comment_em)
comment = Comment(content=content)
comment.diary = diary.first()
comment.email = email
comment.author = author
comment.save(validate=False)
try:
            send_email_task(Config.EMAIL,
                            Config.MAIN_TITLE + u' received a new comment, please check',
                            content, diary_id, author, diary_title)
response = make_response(json.dumps({'success': 'true'}))
response.set_cookie('guest_name', author)
response.set_cookie('guest_email', email)
return response
except Exception as e:
return str(e)
def reply_comment(self, author, diary_id, email, content):
diary = Diary.objects(pk=diary_id)
diary_title = diary.first().title
comment_em = CommentEm(
            author=u'Reply from the blog author',
content=content,
)
diary.update_one(push__comments=comment_em)
        ''' Save in Comment model for admin management '''
comment = Comment(content=content)
comment.diary = diary.first()
comment.author = UserDispatcher().get_profile().name
comment.save(validate=False)
try:
            send_email_task(email, u'The article you commented on, "' +
                            diary_title + u'", has received a reply from the '
                            u'blog author, please check', content, diary_id,
                            author, diary_title)
return json.dumps({'success': 'true'})
except Exception as e:
return json.dumps({'success': 'false', 'reason': str(e)})
def get_all_comments(self, order='-publish_time'):
"""Return Total diaries objects."""
return Comment.objects.order_by(order)
def del_comment_by_id(self, comment_id):
"""Comment delete by id.
Also remove comment from diary detail
Args:
comment_id: Object_id.
Return:
None
"""
comment = Comment.objects.get_or_404(pk=comment_id)
diary = Diary.objects(pk=comment.diary.pk)
diary.update_one(pull__comments={'content': comment.content})
return comment.delete()
def get_comment_list(self, start=0, end=20, order='-publish_time'):
"""Comment list.
default query 20 comments and return if there should be next or prev
page.
Args:
start: num defalut 0
end: num defalut 20
order: str defalut '-publish_time'
Return:
next: boolean
prev: boolean
comments: diaries list
"""
size = end - start
prev = next = False
comments = Comment.objects.order_by(order)[start:end + 1]
if len(comments) - size > 0:
next = True
if start != 0:
prev = True
return prev, next, comments[start:end]
class DiaryDispatcher(object):
""" Diary dispatcher.
Return diary collection objects.
"""
def get_all_diaries(self, order='-publish_time'):
"""Return Total diaries objects."""
return Diary.objects.order_by(order)
def get_by_id(self, diary_id):
"""Diary detail.
Only return diary detail by diary_id.
Args:
diary_id: objectID
Return:
diary: diary object
"""
try:
diary = Diary.objects(pk=diary_id).first()
except ValidationError:
diary = None
return diary
def get_diary_width_navi(self, diary_id):
"""Diary Detail Width page navi boolean.
get diary detail and if there should be prev or next page.
Args:
diary_id: objectID
Return:
diary: diary object
prev: boolean, can be used as 'prev' logic
next: boolean, can be used as 'next' logic
"""
prev = next = True
diary = self.get_by_id(diary_id)
if diary == self.get_first_diary():
next = False
if diary == self.get_last_diary():
prev = False
return prev, next, diary
def get_first_diary(self):
"""Return First Diary object."""
return Diary.objects.order_by('-publish_time').first()
def get_last_diary(self):
"""Return Last Diary object."""
return Diary.objects.order_by('publish_time').first()
def get_prev_diary(self, pub_time):
"""Return Previous Diary object."""
return Diary.objects(publish_time__lt=pub_time
).order_by('-publish_time').first()
def get_next_diary(self, pub_time):
"""Return Next Diary object."""
return Diary.objects(publish_time__gt=pub_time
).order_by('-publish_time').first()
def get_next_or_prev_diary(self, prev_or_next, diary_id):
"""Diary route prev or next function.
Use publish_time to determin what`s the routed diary.
Args:
prev_or_next: string 'prev' or 'next'
diary_id: objectID
Return:
next_diary: routed diary object
"""
diary = self.get_by_id(diary_id)
if prev_or_next == 'prev':
next_diary = self.get_prev_diary(diary.publish_time)
else:
next_diary = self.get_next_diary(diary.publish_time)
return next_diary
def get_diary_count(self):
"""Return Diaries total number."""
return Diary.objects.count()
def get_diary_list(self, start=0, end=10, order='-publish_time'):
"""Diary list.
default query 10 diaries and return if there should be next or prev
page.
Args:
start: num defalut 0
end: num defalut 10
order: str defalut '-publish_time'
Return:
next: boolean
prev: boolean
diaries: diaries list
"""
size = end - start
prev = next = False
diaries = Diary.objects.order_by(order)[start:end + 1]
if len(diaries) - size > 0:
next = True
if start != 0:
prev = True
return prev, next, diaries[start:end]
def edit_diary(self, diary_id, title, html, category, tags):
""" Edit diary from admin
        receives title, content(html), tags and category
        save title, content(html), pure content(for further use), tags and category
        also auto save author as current_user.
        this method will auto create a new Category or Tag if it does not exist,
        otherwise it pushes the diary object onto the existing one
Args:
diary_id: diary_id
title: string
html: string
            category: string
tags: list
Save:
title: string
html: string
content: string without html tags
category: string
tags: list
            summary: first 80 characters of content with 3 dots at the end
author: current_user_object
"""
title = SiteHelpers().secure_filename(title)
category = SiteHelpers().secure_filename(category)
content = SiteHelpers().strip_html_tags(html)
splited_tags = tags.split(',')
author = UserDispatcher().get_profile()
try:
diary = Diary.objects(pk=diary_id).first()
except:
diary = Diary(title=title)
old_cat = diary.category
old_tags = diary.tags
diary.title = title
diary.content = content
diary.category = category
diary.summary = content[0:80] + '...'
diary.html = html
diary.author = author
diary.tags = splited_tags
diary.save()
a, cat = Category.objects.get_or_create(name=category,
defaults={'diaries': [diary]})
if not cat:
Category.objects(name=category).update_one(push__diaries=diary)
if old_cat is not None:
Category.objects(name=old_cat).update_one(pull__diaries=diary)
for t in old_tags:
Tag.objects(name=t).update_one(pull__diaries=diary)
for i in splited_tags:
b, tag = Tag.objects.get_or_create(name=i,
defaults={'diaries': [diary]})
if not tag:
Tag.objects(name=i).update_one(push__diaries=diary)
return
def get_or_create_diary(self, diary_id):
try:
diary = Diary.objects(pk=diary_id).first()
except:
diary = None
categories = Category.objects.all()
return diary, categories
def del_diary_by_id(self, diary_id):
"""Diary delete.
Also delete diary link from category collection
Args:
diary_id: objectID
Return:
None
"""
diary = Diary.objects(pk=diary_id)
Category.objects(name=diary[0].category).update_one(
pull__diaries=diary[0])
return diary.delete()
class CategoryDispatcher(object):
"""Category dispatcher.
Return category objects
"""
def get_all_categories(self, order='-publish_time'):
"""Return Total Categories objects."""
return Category.objects.order_by(order)
def get_diary_list_with_navi(self, cat_name, start=0, end=10,
order='-publish_time'):
"""Category Diary list.
default query 10 diaries and return if there should be next or prev
page.
Args:
cat_name: string
start: num defalut 0
end: num defalut 10
order: str defalut '-publish_time'
Return:
next: boolean
prev: boolean
diaries: diaries list
"""
size = end - start
prev = next = False
diaries = Diary.objects(category=cat_name).order_by(order)[start:
end + 1]
if len(diaries) - size > 0:
next = True
if start != 0:
prev = True
return prev, next, diaries[start:end]
def get_category_count(self):
"""Return Categories total number."""
return Category.objects.count()
def add_new_category(self, cat_name):
"""Category add new.
Will check if the cat_name is unique, otherwise will return an error.
Args:
cat_name: string category name.
Return:
None
"""
cat_name = SiteHelpers().secure_filename(cat_name)
try:
category = Category(name=cat_name)
return category.save()
except NotUniqueError:
return 'category name not unique'
def get_category_detail(self, cat_id):
"""Category detail.
will return category detail by category id.
Args:
            cat_id: category ObjectId.
Return:
category: category object
"""
return Category.objects(pk=cat_id).first()
def del_category_by_name(self, cat_name):
"""Category delete by name.
Will check if the cat_name is unique, otherwise will return an error.
Args:
cat_name: string category name.
Return:
None
"""
return Category.objects.get_or_404(name=cat_name).delete()
class TagDispatcher(object):
"""Tag dispatcher.
Return tag objects
"""
def get_all_tags(self, order='-publish_time'):
"""Return Total Tags objects."""
        return Tag.objects.order_by(order)
def get_diary_list_with_navi(self, tag_name, start=0, end=10,
order='-publish_time'):
"""Tag Diary list.
default query 10 diaries and return if there should be next or prev
page.
Args:
tag_name: string
start: num defalut 0
end: num defalut 10
order: str defalut '-publish_time'
Return:
next: boolean
prev: boolean
diaries: diaries list
"""
size = end - start
prev = next = False
diaries = Diary.objects(tags=tag_name).order_by(order)[start: end + 1]
if len(diaries) - size > 0:
next = True
if start != 0:
prev = True
return prev, next, diaries[start:end]
def get_tag_count(self):
"""Return Tags total number."""
return Tag.objects.count()
def add_new_tag(self, tag_name):
"""Tag add new.
        Will check if the tag_name is unique, otherwise will return an error.
        Args:
            tag_name: string tag name.
Return:
None
"""
tag_name = SiteHelpers().secure_filename(tag_name)
try:
            tag = Tag(name=tag_name)
            return tag.save()
except NotUniqueError:
return 'tag name not unique'
def get_tag_detail(self, tag_name):
"""Tag detail.
will return tag detail by tag name.
Args:
tag_name: string tag name.
Return:
tag: tag object
"""
return Tag.objects(name=tag_name).first()
class PageDispatcher(object):
"""Page dispatcher.
Return page objects
"""
def get_all_pages(self, order='-publish_time'):
return Page.objects.order_by(order)
def get_page(self, page_url):
return Page.objects(url=page_url).first()
def get_static_page(self, page_url):
return StaticPage.objects(url=page_url).first()
def get_all_static_pages(self, order='-publish_time'):
return StaticPage.objects.order_by(order)
def del_cmspage_by_url(self, page_url):
return StaticPage.objects.get_or_404(url=page_url).delete()
def edit_or_create_page(self, title, html, url):
"""CMS page edit or create.
Action for CMS Page.
        Receives title, content(html) and page url
Save title, content(html), pure content(further use), page_url
also auto save author as current_user.
Args:
POST:
title: page title
html: content html
url: page url
GET:
page_url: string
Returns:
POST:
none (for create or save page only)
GET:
page object or none
Save:
title: string
html: string
content: string without html tags
url: string page_url
            summary: first 80 characters of content with 3 dots at the end
author: current_user_object
"""
title = SiteHelpers().secure_filename(title)
content = SiteHelpers().strip_html_tags(html)
author = UserDispatcher().get_profile()
created = StaticPage.objects(url=url)
if created:
page = created.first()
page.title = title
else:
page = StaticPage(title=title,
url=SiteHelpers().secure_filename(url))
page.content = content
page.summary = content[0:80] + '...'
page.html = html
page.author = author
return page.save()
class OtherDispatcher(object):
def up_img_to_upyun(self, collection, data, filename):
"""Up image to upyun collecton.
Will get back upyun link.
Args:
collection: string, collection name
data: image data
filename: filename
Return:
success: boolean, True/False
url: url_link
"""
filename = SiteHelpers().secure_filename(filename)
success, url = SiteHelpers().up_to_upyun(collection, data, filename)
return success, url
def get_rss(self, size):
""" RSS2 Support.
        Generate RSS2 XML for the most recent diaries.
        Args:
            size: number of diaries to include
Return:
rss: xml
"""
articles = Diary.objects.order_by('-publish_time')[:size]
items = []
for article in articles:
content = article.html
url = Config.SITE_URL + '/diary/' + str(article.pk) + '/' + \
article.title
items.append(PyRSS2Gen.RSSItem(
title=article.title,
link=url,
description=content,
guid=PyRSS2Gen.Guid(url),
pubDate=article.publish_time,
))
rss = PyRSS2Gen.RSS2(
title=Config.MAIN_TITLE,
link=Config.SITE_URL,
description=Config.DESCRIPTION,
lastBuildDate=datetime.datetime.now(),
items=items
).to_xml('utf-8')
return rss
| ScenK/Dev_Blog2 | blog/dispatcher/__init__.py | Python | bsd-3-clause | 18,473 | 0.000109 |
import os
import requests
import socket
import subprocess
import unittest
import json
import ray
from ray.rllib import _register_all
from ray.tune.trial import Trial, Resources
from ray.tune.web_server import TuneClient
from ray.tune.trial_runner import TrialRunner
def get_valid_port():
port = 4321
while True:
try:
print("Trying port", port)
port_test_socket = socket.socket()
port_test_socket.bind(("127.0.0.1", port))
port_test_socket.close()
break
except socket.error:
port += 1
return port
class TuneServerSuite(unittest.TestCase):
def basicSetup(self):
# Wait up to five seconds for placement groups when starting a trial
os.environ["TUNE_PLACEMENT_GROUP_WAIT_S"] = "5"
# Block for results even when placement groups are pending
os.environ["TUNE_TRIAL_STARTUP_GRACE_PERIOD"] = "0"
ray.init(num_cpus=4, num_gpus=1)
port = get_valid_port()
self.runner = TrialRunner(server_port=port)
runner = self.runner
kwargs = {
"stopping_criterion": {
"training_iteration": 3
},
"resources": Resources(cpu=1, gpu=1),
}
trials = [Trial("__fake", **kwargs), Trial("__fake", **kwargs)]
for t in trials:
runner.add_trial(t)
client = TuneClient("localhost", port)
return runner, client
def tearDown(self):
print("Tearing down....")
try:
self.runner._server.shutdown()
self.runner = None
except Exception as e:
print(e)
ray.shutdown()
_register_all()
def testAddTrial(self):
runner, client = self.basicSetup()
for i in range(3):
runner.step()
spec = {
"run": "__fake",
"stop": {
"training_iteration": 3
},
"resources_per_trial": {
"cpu": 1,
"gpu": 1
},
}
client.add_trial("test", spec)
runner.step()
all_trials = client.get_all_trials()["trials"]
runner.step()
self.assertEqual(len(all_trials), 3)
def testGetTrials(self):
runner, client = self.basicSetup()
for i in range(3):
runner.step()
all_trials = client.get_all_trials()["trials"]
self.assertEqual(len(all_trials), 2)
tid = all_trials[0]["id"]
client.get_trial(tid)
runner.step()
self.assertEqual(len(all_trials), 2)
def testGetTrialsWithFunction(self):
runner, client = self.basicSetup()
test_trial = Trial(
"__fake",
trial_id="function_trial",
stopping_criterion={"training_iteration": 3},
config={"callbacks": {
"on_episode_start": lambda x: None
}})
runner.add_trial(test_trial)
for i in range(3):
runner.step()
all_trials = client.get_all_trials()["trials"]
self.assertEqual(len(all_trials), 3)
client.get_trial("function_trial")
runner.step()
self.assertEqual(len(all_trials), 3)
def testStopTrial(self):
"""Check if Stop Trial works."""
runner, client = self.basicSetup()
for i in range(2):
runner.step()
all_trials = client.get_all_trials()["trials"]
self.assertEqual(
len([t for t in all_trials if t["status"] == Trial.RUNNING]), 1)
tid = [t for t in all_trials if t["status"] == Trial.RUNNING][0]["id"]
client.stop_trial(tid)
runner.step()
all_trials = client.get_all_trials()["trials"]
self.assertEqual(
len([t for t in all_trials if t["status"] == Trial.RUNNING]), 0)
def testStopExperiment(self):
"""Check if stop_experiment works."""
runner, client = self.basicSetup()
for i in range(2):
runner.step()
all_trials = client.get_all_trials()["trials"]
self.assertEqual(
len([t for t in all_trials if t["status"] == Trial.RUNNING]), 1)
client.stop_experiment()
runner.step()
self.assertTrue(runner.is_finished())
self.assertRaises(
requests.exceptions.ReadTimeout,
lambda: client.get_all_trials(timeout=1))
def testCurlCommand(self):
"""Check if Stop Trial works."""
runner, client = self.basicSetup()
for i in range(2):
runner.step()
stdout = subprocess.check_output(
"curl \"http://{}:{}/trials\"".format(client.server_address,
client.server_port),
shell=True)
self.assertNotEqual(stdout, None)
curl_trials = json.loads(stdout.decode())["trials"]
client_trials = client.get_all_trials()["trials"]
for curl_trial, client_trial in zip(curl_trials, client_trials):
self.assertEqual(curl_trial.keys(), client_trial.keys())
self.assertEqual(curl_trial["id"], client_trial["id"])
self.assertEqual(curl_trial["trainable_name"],
client_trial["trainable_name"])
self.assertEqual(curl_trial["status"], client_trial["status"])
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| pcmoritz/ray-1 | python/ray/tune/tests/test_tune_server.py | Python | apache-2.0 | 5,470 | 0 |
#!/usr/bin/env python
'''
What is a FASTA format file/string?
This module follows the NCBI conventions: http://blast.ncbi.nlm.nih.gov/blastcgihelp.shtml
'''
import cStringIO
import math
def idFromName(line):
'''
line: a fasta nameline
returns: an id parsed from the fasta nameline. The id is the first
whitespace separated token after an optional namespace, etc. See the
examples below. This covers a lot of cases that a sane person would put on
a nameline. So needless to say it covers very few cases. Examples in the
form nameline => return value:
id => id
id desc => id
>id => id
>id desc => id
>ns|id => id
>ns|id desc => id
>ns|id| => id
>ns|id|desc => id
ns|id => id
ns|id desc => id
ns|id| => id
ns|id|desc => id
ns|id blah|desc => id
Example namelines not covered:
JGI-PSF GENOMES ftp://ftp.jgi-psf.org/pub/JGI_data/Nematostella_vectensis/v1.0/annotation/proteins.Nemve1FilteredModels1.fasta.gz
>jgi|Nemve1|18|gw.48.1.1
>jgi|Nemve1|248885|estExt_fgenesh1_pg.C_76820001
'''
# This could probably be done with one regex, but I am too stupid and this way I can read it.
# remove the leading '>' if there is one.
if line.startswith('>'):
line = line[1:]
# keep only everything after the first pipe. will keep everything if there is no first pipe.
pipe = line.find('|')
if pipe > -1:
line = line[line.find('|')+1:]
# keep everything before the second pipe. will keep everything if there is no second pipe.
pipe = line.find('|')
if pipe > -1:
line = line[:pipe]
# return the first token as the id.
return line.split()[0]
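# Worked examples (hedged sketch, mirroring the docstring cases above):
#     idFromName('>sp|P31946|1433B_HUMAN 14-3-3 protein beta')  ->  'P31946'
#     idFromName('plainid some description')                    ->  'plainid'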
def prettySeq(seq, n=60):
'''
seq: one long bare (no nameline) sequence. e.g.
MASNTVSAQGGSNRPVRDFSNIQDVAQFLLFDPIWNEQPGSIVPWKMNREQALAERYPELQTSEPSEDYSGPVESLELLPLEIKLDIMQYLSWEQISWCKHPWLWTRWYKDNVVRVSAITFED
n: maximum length of sequence lines
returns: seq split over multiple lines, all terminated by newlines.
'''
if len(seq) == 0:
raise Exception('zero-length sequence', seq)
seq = ''.join(seq.strip().split())
chunks = int(math.ceil(len(seq)/float(n)))
pretty = ''
for i in range(chunks):
pretty += seq[i*n:(i+1)*n] + '\n'
return pretty
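# Example (hedged): prettySeq('A' * 130, n=60) returns three newline-terminated
# lines of lengths 60, 60 and 10.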
def numSeqsInFastaDb(path):
num = 0
with open(path) as fh:
for line in fh:
if line.startswith('>'):
num += 1
return num
def readIds(fastaFile):
'''
fastaFile: a file-like object or a path to a fasta file
Yields each id in each nameline in each sequence in the fasta file.
'''
for nameline in readNamelines(fastaFile):
yield idFromName(nameline)
def readNamelines(fastaFile):
'''
fastaFile: a file-like object or a path to a fasta file
Yields each nameline in each sequence in the fasta file.
'''
for nameline, seq in readFasta(fastaFile):
yield nameline
def readFasta(fastaFile):
'''
fastaFile: a file-like object or a path to a fasta file
Yields a tuple of (nameline, sequence) for each sequence in the fasta file.
Newlines are stripped from the nameline and sequence lines, and the sequence
lines are concatenated into one long sequence string.
Here is an examle (edited for length):
('>sp|P31946|1433B_HUMAN',
'MTMDKSELVQKAKLAEQAERYDDMAAAMKAVTEQGHELSNEERNLLSVAYKNVVGARRSSWRVISSIEQKT')
'''
for lines in readFastaLines(fastaFile):
nameline = lines[0].strip()
seq = ''.join((l.strip() for l in lines[1:]))
yield nameline, seq
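# Hedged usage sketch ('proteins.fa' is an illustrative path, not a file shipped
# with this module):
#     for nameline, seq in readFasta('proteins.fa'):
#         print idFromName(nameline), len(seq)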
def readFastaLines(fastaFile):
'''
fastaFile: a file-like object or a path to a fasta file
yields: a seq of fasta sequence lines for each sequence in the fasta file.
the first line is the nameline. the other lines are the sequence data lines. lines include newlines.
'''
if isinstance(fastaFile, basestring):
with open(fastaFile) as fh:
for lines in relaxedFastaSeqIter(fh):
yield lines
else:
for lines in relaxedFastaSeqIter(fastaFile):
yield lines
def splitSeq(seq):
'''
seq: a well-formed fasta sequence string containing a single nameline, including '>' and sequence data lines.
returns: tuple of nameline, including '>', without a newline, and concatenated sequence lines, without newlines
e.g. ['>blahname', 'AFADFDSAFAFAFAFFAFAF']
'''
lines = seq.splitlines()
name = lines[0].strip()
chars = ''.join([l.strip() for l in lines[1:]])
return [name, chars]
def relaxedFastaSeqIter(filehandle):
'''
Parse the lines in filehandle, first removing any blank lines, and then
yielding all well-formed fasta sequences and ignoring badly-formed seqs. A
well-formed sequence has exactly one nameline followed by one or more
sequence lines.
Well-formed example:
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
Badly-formed example (no nameline):
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
Badly-formed example (no sequence lines):
>sp|P27348|1433T_HUMAN
'''
# lines guaranteed to have no blank lines by filterBlanks()
# lines guaranteed to have have exactly one nameline as the first
# element (except possibly the first lines yielded, which might not
# have a nameline if the filehandle starts with a sequence line).
for lines in splitFastaOnNamelines(filterBlanks(filehandle)):
if lines[0][0] == '>' and len(lines) >= 2:
yield lines
def filterBlanks(lines):
'''
Yield each line in lines that contains non-whitespace characters.
Used to remove blank lines from FASTA files.
'''
for line in lines:
if line.strip():
yield line
def splitFastaOnNamelines(filehandle):
'''
Split the lines in filehandle on namelines. Yields a seq of lines, where
the first line in the seq is a nameline (except if filehandle starts with a
non-nameline) and the other lines are lines until the next nameline or the
end of the file. Lines include newlines. The seq of lines will always
contain at least one line. Only the first line will ever be a nameline.
Example input (note that this is not well-formed fasta, since it starts
with a sequence line, has a nameline with no sequence lines, and has blank
lines within a sequence):
VLSSIEQKSNEEGSEEKGPEVREYREKVETELQGVCDTVLGLLDSHLIKEAGDAESRVFY
>sp|P31947|1433S_HUMAN
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
>sp|P63104|1433Z_HUMAN
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR
MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE
Example yielded output (note how every sequence except the first starts
with a nameline):
yield ['VLSSIEQKSNEEGSEEKGPEVREYREKVETELQGVCDTVLGLLDSHLIKEAGDAESRVFY\n']
yield ['>sp|P31947|1433S_HUMAN\n']
yield ['>sp|P27348|1433T_HUMAN\n',
'MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR\n',
'\n',
'EGAEN\n',
'\n']
yield ['>sp|P63104|1433Z_HUMAN\n',
'\n',
'MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR\n',
'MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE\n']
Well-formed example input (note how the first line of the input is a
nameline):
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
>sp|P63104|1433Z_HUMAN
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR
MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE
Well-formed example output (note how the first element of the first yielded
list is a nameline):
yield ['>sp|P27348|1433T_HUMAN\n',
'MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR\n',
'EGAEN\n']
yield ['>sp|P63104|1433Z_HUMAN\n',
'MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR\n',
'MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE\n']
'''
lines = []
for line in filehandle:
if line and line[0] == '>': # a nameline
if lines:
yield lines # yield current sequence
lines = [line] # start new sequence
else:
lines.append(line) # add to current sequence
if lines:
yield lines # yield current sequence
def isNameLine(line):
return line.startswith('>')
def head(query, n):
'''returns the first n sequences in query.'''
count = 0
headstr = ''
    for line in query.splitlines(True):  # positional arg is keepends; Python 2 rejects the keyword form
if line.startswith('>'):
count += 1
if count > n: break
headstr += line
return headstr
def dbSize(query):
'''returns the number of sequence characters'''
size = 0
for line in query.splitlines():
if isNameLine(line):
continue
size += len(line.strip())
return size
def numChars(query):
'''
synonym for dbSize(). returns the number of character (e.g. bases or residues for nucleotide or protein sequences).
'''
return dbSize(query)
def numSeqs(query):
'''
synonym for size(), whose name is a little more specific as to what is being measured: the number of sequences.
'''
return size(query)
def size(query):
'''
query: string containing fasta formatted seqeunces
returns: the number of sequences
'''
fh = cStringIO.StringIO(query)
size = numSeqsInFile(fh)
fh.close()
return size
def numSeqsInFile(file):
'''
file: file like object containing fasta formatted sequences
'''
return sum([1 for line in file if isNameLine(line.strip())])
def numSeqsInPath(path):
'''
path: path to fasta formatted db
returns: number of sequences in fasta db
'''
fh = open(path)
size = numSeqsInFile(fh)
fh.close()
return size
def main():
pass
if __name__ == '__main__':
main()
##################
# DECPRECATED CODE
##################
def fastaSeqIterOld(filehandle, ignoreParseError=False):
'''
filehandle: file object containing fasta-formatted sequences.
ignoreParseError: if True, parsing ignores namelines that do not have sequence character lines. For example, '>foo\n>bar\nABCD\n'
would yield the 'bar' sequence, ignoring the 'foo' sequence that has no sequence characters associated with it.
In all cases blank lines are ignored, no matter where they occur.
Generator function yielding a string representing a single fasta sequence (name line including '>' and sequence lines)
for each fasta sequence in filehandle.
returns: a generator.
notes:
This function was modified from fastaSeqIterStrict to handle bogus fasta input like this:
>ORFP:20136 YOL048C, Contig c378 4079-4399 reverse complement
MLFKVSNFTSLTLLSLIPIVGPILANQLMAPKRTFTYLQRYFLLKGFSKKQAKDFQYEHYASFICFGMSAGLLELIPFFTIVTISSNTVGAAKWCSSLLKGERKKD*
>ORFP:18671 , Contig c238 100299-100300 reverse complement
>ORFP:20137 , Contig c378 4878-5189 reverse complement
MKVGIELISHSQTSHGTHVNSTVLAEKTPQPLEKPSKEHSISKESNINRWLKI
LRRQFDIWFPETIPTMKVRYELLKKNFIKEIFNSRAFIYPFLVSILYYLY*
The old function, even with error handling turned on, would not concatenate all the sequence characters of the 3rd sequence
since they are separated by a blank line.
'''
# states: seeking_nameline, seeking_seqline, in_seq.
state = 'seeking_nameline'
fasta = ''
for line in filehandle:
# ignore all blank lines
if not line.strip():
continue
elif state == 'seeking_nameline' and line.startswith('>'):
state = 'seeking_seqline'
fasta = line
elif state == 'seeking_nameline' and not ignoreParseError:
raise Exception('FASTA parse error. Looking for name line and found line which is neither blank nor nameline. line=%s'%line)
elif state == 'seeking_seqline' and line.startswith('>'):
if ignoreParseError:
# skip nameline without sequence and restart with this nameline.
state = 'seeking_seqline'
fasta = line
else:
raise Exception('FASTA parse error. Looking for sequence line and found name line. line=%s'%line)
elif state == 'seeking_seqline':
state = 'in_seq'
fasta += line
elif state == 'in_seq' and line.startswith('>'):
yield fasta
state = 'seeking_seqline'
fasta = line
elif state == 'in_seq':
fasta += line
else:
raise Exception('FASTA parse error. Unrecognized state. state=%s, line=%s'%(state, line))
if state == 'in_seq':
yield fasta
elif state == 'seeking_seqline' and not ignoreParseError:
raise Exception('FASTA parse error. Looking for sequence line and found end of file.')
elif state == 'seeking_nameline':
pass
else:
raise Exception('FASTA parse error. Unrecognized state found at end of file. state=%s'%state)
def readFastaLinesOld(fastaFile, strict=True, goodOnly=True, filterBlankLines=False):
'''
fastaFile: a file-like object or a path to a fasta file
yields: a seq of fasta sequence lines for each sequence in the fasta file.
the first line is the nameline. the other lines are the sequence data lines. lines include newlines.
'''
if isinstance(fastaFile, basestring):
with open(fastaFile) as fh:
for lines in _fastaSeqIter(fh, strict, goodOnly, filterBlankLines):
yield lines
else:
for lines in _fastaSeqIter(fastaFile, strict, goodOnly, filterBlankLines):
yield lines
def _fastaSeqIter(filehandle, strict=True, goodOnly=True, filterBlankLines=False):
'''
filehandle: file object containing fasta-formatted sequences.
strict: if True, raise an exception when a malformed fasta sequence is encountered.
A malformed sequence is a sequence with a blank line, a sequence line not preceded by a nameline, or a nameline not followed by a sequence line.
E.g. a nameline directly after a nameline, like '>foo\n>bar\nCTAGCTAGGGCA\n'
goodOnly: if True, only yield well-formed fasta sequences, ones with a nameline and one or more sequence datalines and no blank lines.
if False and strict is False, all sequences, malformed or otherwise, will be yielded. there will always be at least one (possibly blank) line.
filterBlankLines: if True, no blank lines (lines only containing whitespace) will be yielded and blank lines will not raise an exception.
Parses the filehandle, yielding one fasta sequence at a time.
yields: a seq of fasta sequence lines. the first line is the nameline. the other lines are the sequence data lines.
'''
for lines in _splitOnNamelines(filehandle, filterBlankLines):
if not lines[0] or lines[0][0] != '>':
if strict:
raise Exception('FASTA error: sequence must start with a nameline.', lines)
elif not goodOnly:
yield lines
elif len(lines) < 2:
if strict:
raise Exception('FASTA error: sequence must contain at least one sequence data line.', lines)
elif not goodOnly:
yield lines
elif '' in (line.strip() for line in lines): # contains a blank line
if strict:
raise Exception('FASTA error: blank lines not allowed')
elif not goodOnly:
yield lines
else: # a good sequence
yield lines
def _splitOnNamelines(filehandle, filterBlankLines=False):
'''
Split the lines in filehandle on namelines. Example nameline: '>lcl|12345'
Yields a seq of lines, where the first line is a nameline (except if filehandle starts with a non-nameline) and the other lines are lines until the next nameline
or the end of the file. Lines include newlines. The seq of lines will always contain at least one line. Only the first line will ever be a nameline.
filterBlankLines: if True, no blank lines (lines only containing whitespace) will be yielded.
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
>sp|P63104|1433Z_HUMAN
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR
MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE
'''
lines = []
for line in filehandle:
if line and line[0] == '>': # a nameline
if lines:
yield lines # yield current sequence
lines = [line] # start new sequence
elif not filterBlankLines: # a blank or non-blank line
lines.append(line) # add to current sequence
elif line.strip(): # a non-blank line
lines.append(line) # add to current sequence
if lines:
yield lines # yield current sequence
def _fastaSeqIter2(filehandle):
'''
Untested code.
This is a state-machine parser for FASTA formatted files. The page
http://blast.ncbi.nlm.nih.gov/blastcgihelp.shtml describes this format.
This parser is tolerant of blank lines that come before a nameline and/or
after the sequence lines of a sequence, but not within a sequence.
This is OK because it contains a blank line after the sequence lines of the
first sequence and before the sequence lines of the second sequence:
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
>sp|P63104|1433Z_HUMAN
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR
MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE
This is NOT ok because one sequence contains a blank line after then
nameline and before the sequence lines and the other sequence contains a
blank line between two sequence lines:
>sp|P27348|1433T_HUMAN
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
>sp|P63104|1433Z_HUMAN
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR
MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE
Sequences must start with a nameline. This is NOT ok:
MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR
EGAEN
Sequences must contain at least one sequence line. This is NOT ok:
>sp|P63104|1433Z_HUMAN
For every sequence in the fasta file, this parser yields the lines of each
sequence in the fasta file. Here is an example of what it yields:
['>sp|P27348|1433T_HUMAN\n',
'MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR\n',
'EGAEN\n']
This parser will raise an Exception
'''
    PARSING_ERROR = 'FASTA parse error'
    # parsing states
LFN = 'Looking for nameline'
LFS = 'Looking for sequence line'
LFA = 'Looking for any line' # a sequence or a nameline
# line states: blank, nameline (starts with '>'), seq line (starts with
# anything else), or EOF.
BL = 'blank line'
NL = 'nameline'
SL = 'sequence line'
state = LFN
lines = []
for line in filehandle:
linestate = BL if not line.strip() else NL if line[0] == '>' else SL
if state == LFN:
if linestate == BL:
continue
elif linestate == NL:
lines = [line] # Start a new sequence
state = LFS # Found nameline. Now look for seq lines.
elif linestate == SL:
raise Exception(PARSING_ERROR, 'Expecting a {} or {}. Found a {}'.format(BL, NL, SL), line, state, linestate)
else:
raise Exception(PARSING_ERROR, 'Unrecognized line state', line, state, linestate)
elif state == LFS:
if linestate == BL:
raise Exception(PARSING_ERROR, 'Expecting a {}. Found a {}'.format(SL, BL), line, state, linestate)
elif linestate == NL:
raise Exception(PARSING_ERROR, 'Expecting a {}. Found a {}'.format(SL, NL), line, state, linestate)
elif linestate == SL:
lines.append(line) # Add to the current sequence
state = LFA # Found a seq line, Now look for more seq lines or a new sequence.
else:
raise Exception(PARSING_ERROR, 'Unrecognized line state', line, state, linestate)
elif state == LFA:
if linestate == BL:
yield lines # Emit the current sequence
lines = []
state = LFN # Look for a new sequence.
elif linestate == NL:
yield lines # Emit the current sequence
lines = [line] # Start a new sequence
state = LFS # Found nameline. Now look for seq lines.
elif linestate == SL:
lines.append(line) # Add to the current sequence.
else:
raise Exception(PARSING_ERROR, 'Unrecognized line state', line, state, linestate)
else:
raise Exception(PARSING_ERROR, 'Unrecognized parsing state', line, state, linestate)
# EOF
if state == LFN:
pass # Done
elif state == LFS:
raise Exception(PARSING_ERROR, 'Expecting a {}. Found an EOF.'.format(SL), state)
elif state == LFA:
yield lines # Emit the current sequence
pass # Done
else:
raise Exception(PARSING_ERROR, 'Unrecognized parsing state', state)
| todddeluca/tfd | tfd/fasta.py | Python | mit | 21,889 | 0.004203 |
# Django settings for project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '01234567890123456789012345678901234567890123456789'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'project.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
)
| fmierlo/django-default-settings | release/1.2/project/settings.py | Python | bsd-3-clause | 3,293 | 0.002126 |
import argparse
import random
import time
import sys
parser = argparse.ArgumentParser('eligibility')
parser.add_argument('infile', \
help='the file containing the list of names to randomly sort')
parser.add_argument('-s', '--spots', metavar='num', required=True, type=int, \
help='the number of spots available on campus')
parser.add_argument('outfile', \
help='the file to output the results to')
parser.add_argument('-d', '--delay', metavar='seconds', required=False, \
type=float, default=0.5, help='the delay between selections in '
'decimal seconds (0.5 by default)')
MCM_CREST = """
`/:. .:+.
`///. McMURTRY COLLEGE .///. ``
.///. .///- ``
-///. .-:////+ooo+/:-. .///-```
-///. .:oyhhdyyddshydhydhhyo:. .///-````
`-/++sydyysddshhdsshddoyddddhys++/-````
`/hhhsyhsyhddyyyhhysshdddhsdhdhh/```
.shdssyohhhyo//+////+//+yhhhyyosyys-
:yyyyy . + /+sssyo . yyyhy:
:hhsyh .sdho:+sdddddm+ os . hhhhh:
.hdsyyh `oddyoyymdmmdds ydd/``-:hyhhdy.
ohhdhhd `.:ddmdddddddd- + o-. hdssyy/
`hdhyyhh -`-ymmmddddms..s-- hdhhhdh.
-hdyhydh /o:/mdmdmmdy: :h+ hyyyhhh:
-hdshydd /ymddhhhoydhohy/:+h dhyyyhh:
`hdsyyddo /s+o-+hhhdmddddooy +ddysydh.
sdhhhddh/ ` +ddd+sdddy/+/ yddhyyh+`
.hdhyyyyys: .oyoydddo-+ddhs/. +ydddhyy-
+hsyhhddho` :yhodoo+yssddds. sddyyyhh/
+yyddhhdddy.`.-:/::+ymdhs:`` +hddhyhyy/
:-``/shddddddddyo+/+oso+s++ooosdddhyhddy:```-:
-oo::/+shdddddddddddddddddhdddddhyhdhyo///:+o:
`sdhs-``/ydddhdddddddhhddddddhyhdhs:``-ohds.-.
`+hdy:+o- `:ohhddddddddddddddyhhho. -o+:yho+.`
`:hdy: -o. -/oyhdddddddhyso:. `o- :ydh:`
`oyds- :hydddhoy: -omyo.
-yh+ -yyhs:+yy: +hh-
sys///ss`
`+osso+`
"""
def welcome(spots):
"""
Prints the McMurtry crest to stdout. Returns when the user confirms the
start of the program by typing any key.
Arguments:
spots - the number of spots that the program will allocate for housing
Returns:
none
"""
print MCM_CREST
print 'Welcome to McMurtry College Eligibility Jack.'
print 'This program will randomly allocate ' + str(spots) \
+ ' spots for housing.'
print 'Hit any key to begin...'
raw_input('')
def run_eligibility(names_file, spots, delay=0.5):
"""
Randomly sorts the provided names into two lists, one that is receiving
housing and another that is a housing wait list. The number of spots for
the former is determined by the variable passed to the function.
Arguments:
names_file - the path of the file containing a line separated list of
names
spots - the number of spots to allocate for housing
delay (optional) - the delay between successive picks, default is 0.5
Returns:
the list of students who were picked for on campus housing;
the list of students (in order) who were picked to be on the wait list
"""
on_campus = []
waitlist = []
try:
with open(names_file, 'r') as names_f:
            lines = names_f.readlines()
            names = map(lambda l: l.rstrip('\n'), lines)
if spots > len(names):
print >> sys.stderr, 'Number of spots greater than names ' + \
'list. No need for eligibility jack.'
sys.exit(-1)
print 'Receiving on campus housing:\n'
num = 1
while names:
name = random.choice(names)
names.remove(name)
time.sleep(delay)
if num > spots:
print str(num - spots) + ': ' + name
waitlist.append(name)
else:
print str(num) + ': ' + name
on_campus.append(name)
if num == spots:
print '\nHousing Waitlist:\n'
num += 1
except IOError:
print >> sys.stderr, 'There was an error opening the specified' + \
' file \'' + names_file +'\' for read.'
return on_campus, waitlist
def write_results(out_file, on_campus, waitlist):
"""
Writes the specified lists of students to a file in the same format that
run_eligibility prints to stdout.
Arguments:
out_file - the path of the file to write the results to
on_campus - the list of students selected for on-campus housing
waitlist - the list of students (in order) who were selected for the
wait list
Returns:
none
"""
try:
with open(out_file, 'w') as out_f:
out_f.write('Receiving on campus housing:\n')
for name_i in xrange(len(on_campus)):
out_f.write(str(name_i + 1) + ': ' + on_campus[name_i] + '\n')
out_f.write('\nHousing Waitlist:\n')
for name_i in xrange(len(waitlist)):
out_f.write(str(name_i + 1) + ': ' + waitlist[name_i] + '\n')
except IOError:
print >> sys.stderr, 'There was an error opening the specified' + \
' file \'' + out_file +'\' for write.'
# Main runner for the program.
if __name__ == '__main__':
    args = parser.parse_args()
welcome(args.spots)
oc, wl = run_eligibility(args.infile, args.spots, args.delay)
write_results(args.outfile, oc, wl)
| wallaceicy06/Eligibility | eligibility.py | Python | mit | 5,750 | 0.003304 |
from __future__ import with_statement
from robot.api import logger
class WrongStat(AssertionError):
ROBOT_CONTINUE_ON_FAILURE = True
def get_total_stats(path):
return get_all_stats(path)[0]
def get_tag_stats(path):
return get_all_stats(path)[1]
def get_suite_stats(path):
return get_all_stats(path)[2]
def get_all_stats(path):
logger.info('Getting stats from <a href="file://%s">%s</a>' % (path, path),
html=True)
stats_line = _get_stats_line(path)
logger.debug('Stats line: %s' % stats_line)
total, tags, suite = eval(stats_line)
return total, tags, suite
def _get_stats_line(path):
prefix = 'window.output["stats"] = '
with open(path) as file:
for line in file:
if line.startswith(prefix):
return line[len(prefix):-2]
def verify_stat(stat, *attrs):
expected = dict(_get_expected_stat(attrs))
if stat != expected:
raise WrongStat('\n%-9s: %s\n%-9s: %s' % ('Got', stat, 'Expected', expected))
def _get_expected_stat(attrs):
for key, value in (a.split(':', 1) for a in attrs):
value = int(value) if value.isdigit() else str(value)
yield str(key), value
| Senseg/robotframework | atest/robot/output/html_output_stats.py | Python | apache-2.0 | 1,194 | 0.005863 |
# oppia/api/media.py
from django.conf import settings
from django.contrib.auth import authenticate
from django.http import HttpResponseRedirect, Http404, HttpResponse, JsonResponse
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_exempt
from django.contrib import messages
from oppia.api.publish import get_messages_array
from oppia.av.models import UploadedMedia
from oppia.av import handler
@csrf_exempt
def upload_view(request):
# get the messages to clear possible previous unprocessed messages
get_messages_array(request)
if request.method != 'POST':
return HttpResponse(status=405)
required = ['username', 'password']
validation_errors = []
for field in required:
if field not in request.POST:
validation_errors.append("field '{0}' missing".format(field))
# authenticate user
username = request.POST.get("username")
password = request.POST.get("password")
user = authenticate(username=username, password=password)
if user is None or not user.is_active:
messages.error(request, "Invalid username/password")
response_data = {
'message': _('Authentication errors'),
'messages': get_messages_array(request)
}
return JsonResponse(response_data, status=401)
if validation_errors:
return JsonResponse({'errors': validation_errors}, status=400, )
result = handler.upload(request, user)
if result['result'] == UploadedMedia.UPLOAD_STATUS_SUCCESS:
media = result['media']
embed_code = media.get_embed_code(request.build_absolute_uri(media.file.url))
return JsonResponse({'embed_code': embed_code}, status=201)
else:
response = {'messages': result['errors']}
return JsonResponse(response, status=400)
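# Hedged usage sketch: the view expects a multipart POST carrying the credentials
# plus the media file. The URL and the file field name below are illustrative
# assumptions (the real field name is defined by oppia.av.handler).
#
#     curl -F username=demo -F password=secret -F media_file=@clip.m4v \
#          https://example.com/api/media/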
| DigitalCampus/django-nurhi-oppia | oppia/api/media.py | Python | gpl-3.0 | 1,856 | 0.001078 |
# Copyright 2015, 2017 Jairo Llopis <[email protected]>
# Copyright 2016 Tecnativa, S.L. - Vicent Cubells
# Copyright 2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Date & Time Formatter",
"summary": "Helper functions to give correct format to date[time] fields",
"version": "12.0.1.0.0",
"category": "Tools",
"website": "https://github.com/OCA/server-tools",
"author": "Grupo ESOC Ingeniería de Servicios, "
"Tecnativa, "
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"installable": True,
"depends": [
"base",
],
}
| Vauxoo/server-tools | datetime_formatter/__manifest__.py | Python | agpl-3.0 | 666 | 0 |
# -*- coding: utf-8 -*-
""" Python KNX framework
License
=======
- B{PyKNyX} (U{https://github.com/knxd/pyknyx}) is Copyright:
- © 2016-2017 Matthias Urlichs
- PyKNyX is a fork of pKNyX
- © 2013-2015 Frédéric Mantegazza
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
or see:
- U{http://www.gnu.org/licenses/gpl.html}
Module purpose
==============
Datapoint Types management.
Implements
==========
- B{DPTXlatorDate}
Usage
=====
see L{DPTXlatorBoolean}
Note
====
KNX century encoding is as following:
- if byte year >= 90, then real year is 20th century year
    - if byte year is < 90, then real year is 21st century year
Python time module does not encode century the same way:
- if byte year >= 69, then real year is 20th century year
    - if byte year is < 69, then real year is 21st century year
The DPTXlatorDate class follows the Python encoding.
@author: Frédéric Mantegazza
@author: B. Malinowsky
@copyright: (C) 2013-2015 Frédéric Mantegazza
@copyright: (C) 2006, 2011 B. Malinowsky
@license: GPL
"""
import struct
from pyknyx.services.logger import logging; logger = logging.getLogger(__name__)
from pyknyx.core.dptXlator.dptId import DPTID
from pyknyx.core.dptXlator.dpt import DPT
from pyknyx.core.dptXlator.dptXlatorBase import DPTXlatorBase, DPTXlatorValueError
class DPTXlatorDate(DPTXlatorBase):
""" DPTXlator class for Date (r3U5r4U4r1U7) KNX Datapoint Type
- 3 Byte: rrrDDDDD rrrrMMMM rYYYYYYY
- D: Day [1:31]
- M: Month [1:12]
- Y: Year [0:99]
- r: reserved (0)
.
"""
DPT_Generic = DPT("11.xxx", "Generic", (0, 16777215))
DPT_Date = DPT("11.001", "Date", ((1, 1, 1969), (31, 12, 2068)))
def __init__(self, dptId):
super(DPTXlatorDate, self).__init__(dptId, 3)
def checkData(self, data):
if not 0x000000 <= data <= 0xffffff:
raise DPTXlatorValueError("data %s not in (0x000000, 0xffffff)" % hex(data))
def checkValue(self, value):
for index in range(3):
if not self._dpt.limits[0][index] <= value[index] <= self._dpt.limits[1][index]:
raise DPTXlatorValueError("value not in range %s" % repr(self._dpt.limits))
def dataToValue(self, data):
day = (data >> 16) & 0x1f
month = (data >> 8) & 0x0f
year = data & 0x7f
if year >= 69:
year += 1900
else:
year += 2000
value = (day, month, year)
#logger.debug("DPTXlatorDate._toValue(): value=%d" % value)
return value
def valueToData(self, value):
day = value[0]
month = value[1]
year = value[2]
if year >= 2000:
year -= 2000
else:
year -= 1900
data = day << 16 | month << 8 | year
#logger.debug("DPTXlatorDate.valueToData(): data=%s" % hex(data))
return data
def dataToFrame(self, data):
data = [(data >> shift) & 0xff for shift in range(16, -1, -8)]
return bytearray(struct.pack(">3B", *data))
def frameToData(self, frame):
data = struct.unpack(">3B", frame)
data = data[0] << 16 | data[1] << 8 | data[2]
return data
@property
def day(self):
return self.value[0]
@property
def month(self):
return self.value[1]
@property
def year(self):
return self.value[2]
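# Hedged usage sketch: round-trip one date through the translator, following the
# Python-style century rule documented above (byte year >= 69 -> 19xx, < 69 -> 20xx).
#
#     xlator = DPTXlatorDate("11.001")
#     xlator.valueToData((25, 12, 2013))   # -> 0x190c0d (day=0x19, month=0x0c, year=0x0d)
#     xlator.dataToValue(0x190c0d)         # -> (25, 12, 2013)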
| knxd/pKNyX | pyknyx/core/dptXlator/dptXlatorDate.py | Python | gpl-3.0 | 4,000 | 0.002255 |
"""api_server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
version = 'v1.0'
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'api/%s/' % version, include('apis.urls'))
]
| AutohomeOps/Assets_Report | api_server/api_server/urls.py | Python | apache-2.0 | 846 | 0 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('glance')
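# Hedged usage sketch: pbr's VersionInfo resolves the installed package version
# at runtime, e.g.
#     version_info.version_string()   # e.g. '13.0.0' -- actual value depends on the install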
| dims/glance | glance/version.py | Python | apache-2.0 | 686 | 0 |
import deepdish as dd
class Foo(dd.util.SaveableRegistry):
def __init__(self, x):
self.x = x
@classmethod
def load_from_dict(self, d):
obj = Foo(d['x'])
return obj
def save_to_dict(self):
return {'x': self.x}
@Foo.register('bar')
class Bar(Foo):
def __init__(self, x, y):
self.x = x
self.y = y
@classmethod
def load_from_dict(self, d):
obj = Bar(d['x'], d['y'])
return obj
def save_to_dict(self):
return {'x': self.x, 'y': self.y}
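# Hedged usage sketch (assumes deepdish's SaveableRegistry save/load interface):
#     b = Bar(1, 2)
#     b.save('foo.h5')          # persists the dict from save_to_dict()
#     obj = Foo.load('foo.h5')  # expected to dispatch back to Bar via the 'bar' key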
| agarbuno/deepdish | doc/source/codefiles/saveable_example.py | Python | bsd-3-clause | 542 | 0.001845 |
# Copyright (c) 2014-2016, Clemson University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Clemson University nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db.models.signals import pre_save
from django.dispatch import receiver
try:
from django.utils.timezone import now
except ImportError:
import datetime
now = datetime.datetime.now
from django_sshkey.util import PublicKeyParseError, pubkey_parse
from django_sshkey import settings
class UserKey(models.Model):
user = models.ForeignKey(User, db_index=True)
name = models.CharField(max_length=50, blank=True)
key = models.TextField(max_length=2000)
fingerprint = models.CharField(max_length=128, blank=True, db_index=True)
created = models.DateTimeField(auto_now_add=True, null=True)
last_modified = models.DateTimeField(null=True)
last_used = models.DateTimeField(null=True)
class Meta:
db_table = 'sshkey_userkey'
unique_together = [
('user', 'name'),
]
def __unicode__(self):
return unicode(self.user) + u': ' + self.name
def clean_fields(self, exclude=None):
if not exclude or 'key' not in exclude:
self.key = self.key.strip()
if not self.key:
raise ValidationError({'key': ["This field is required."]})
def clean(self):
self.key = self.key.strip()
if not self.key:
return
try:
pubkey = pubkey_parse(self.key)
except PublicKeyParseError as e:
raise ValidationError(str(e))
self.key = pubkey.format_openssh()
self.fingerprint = pubkey.fingerprint()
if not self.name:
if not pubkey.comment:
raise ValidationError('Name or key comment required')
self.name = pubkey.comment
def validate_unique(self, exclude=None):
if self.pk is None:
objects = type(self).objects
else:
objects = type(self).objects.exclude(pk=self.pk)
if exclude is None or 'name' not in exclude:
if objects.filter(user=self.user, name=self.name).count():
message = 'You already have a key with that name'
raise ValidationError({'name': [message]})
if exclude is None or 'key' not in exclude:
try:
other = objects.get(fingerprint=self.fingerprint, key=self.key)
if self.user == other.user:
message = 'You already have that key on file (%s)' % other.name
else:
message = 'Somebody else already has that key on file'
raise ValidationError({'key': [message]})
except type(self).DoesNotExist:
pass
def export(self, format='RFC4716'):
pubkey = pubkey_parse(self.key)
f = format.upper()
if f == 'RFC4716':
return pubkey.format_rfc4716()
if f == 'PEM':
return pubkey.format_pem()
raise ValueError("Invalid format")
def save(self, *args, **kwargs):
if kwargs.pop('update_last_modified', True):
self.last_modified = now()
super(UserKey, self).save(*args, **kwargs)
def touch(self):
self.last_used = now()
self.save(update_last_modified=False)
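# Hedged usage sketch ('laptop' and the key path are illustrative; some_user is an
# existing django User instance):
#     key = UserKey(user=some_user, name='laptop', key=open('id_rsa.pub').read())
#     key.full_clean()   # parses the key, fills in the fingerprint, checks uniqueness
#     key.save()
#     key.export('PEM')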
@receiver(pre_save, sender=UserKey)
def send_email_add_key(sender, instance, **kwargs):
if not settings.SSHKEY_EMAIL_ADD_KEY or instance.pk:
return
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives
from django.core.urlresolvers import reverse
context_dict = {
'key': instance,
'subject': settings.SSHKEY_EMAIL_ADD_KEY_SUBJECT,
}
request = getattr(instance, 'request', None)
if request:
context_dict['request'] = request
context_dict['userkey_list_uri'] = request.build_absolute_uri(
reverse('django_sshkey.views.userkey_list'))
text_content = render_to_string('sshkey/add_key.txt', context_dict)
msg = EmailMultiAlternatives(
settings.SSHKEY_EMAIL_ADD_KEY_SUBJECT,
text_content,
settings.SSHKEY_FROM_EMAIL,
[instance.user.email],
)
if settings.SSHKEY_SEND_HTML_EMAIL:
html_content = render_to_string('sshkey/add_key.html', context_dict)
msg.attach_alternative(html_content, 'text/html')
msg.send()
| ClemsonSoCUnix/django-sshkey | django_sshkey/models.py | Python | bsd-3-clause | 5,547 | 0.008293 |
"""Cache util functions for ReSDKTables."""
import os
import pickle
import sys
from shutil import rmtree
from typing import Any
from resdk.__about__ import __version__
def _default_cache_dir() -> str:
"""Return default cache directory specific for the current OS.
Code originally from Orange3.misc.environ.
"""
if sys.platform == "darwin":
base = os.path.expanduser("~/Library/Caches")
elif sys.platform == "win32":
base = os.getenv("APPDATA", os.path.expanduser("~/AppData/Local"))
elif os.name == "posix":
base = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
else:
base = os.path.expanduser("~/.cache")
return base
def cache_dir_resdk_base() -> str:
"""Return base ReSDK cache directory."""
return os.path.join(_default_cache_dir(), "ReSDK")
def cache_dir_resdk() -> str:
"""Return ReSDK cache directory."""
v = __version__
if "dev" in v:
# remove git commit hash
v = v[: v.find("dev") + 3]
base = os.path.join(cache_dir_resdk_base(), v)
if sys.platform == "win32":
# On Windows cache and data dir are the same.
# Microsoft suggest using a Cache subdirectory
return os.path.join(base, "Cache")
else:
return base
def clear_cache_dir_resdk() -> None:
"""Delete all cache files from the default cache directory."""
cache_dir = cache_dir_resdk_base()
if os.path.exists(cache_dir):
rmtree(cache_dir)
def load_pickle(pickle_file: str) -> Any:
"""Load object from the pickle file.
:param pickle_file: file path
:return: un-pickled object
"""
if os.path.exists(pickle_file):
with open(pickle_file, "rb") as handle:
return pickle.load(handle)
def save_pickle(obj: Any, pickle_file: str, override=False) -> None:
"""Save given object into a pickle file.
    :param obj: object to be pickled
:param pickle_file: file path
    :param override: if True, overwrite an existing file
:return:
"""
if not os.path.exists(pickle_file) or override:
with open(pickle_file, "wb") as handle:
pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)
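# Hedged usage sketch (the file name is illustrative, not part of resdk):
#     cache_file = os.path.join(cache_dir_resdk(), "expression.pickle")
#     os.makedirs(os.path.dirname(cache_file), exist_ok=True)
#     save_pickle({"gene": "BRCA2"}, cache_file, override=True)
#     load_pickle(cache_file)   # -> {'gene': 'BRCA2'}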
| genialis/resolwe-bio-py | src/resdk/utils/table_cache.py | Python | apache-2.0 | 2,205 | 0 |
import locale
import threading
from contextlib import contextmanager
LOCALE_LOCK = threading.Lock()
BR_DATESHORT_FORMAT = '%a, %d %b - %Hh%M'
@contextmanager
def setlocale(name):
with LOCALE_LOCK:
saved = locale.setlocale(locale.LC_ALL)
try:
yield locale.setlocale(locale.LC_ALL, name)
        except locale.Error:
yield
finally:
locale.setlocale(locale.LC_ALL, saved)
def format_datetimeshort(date_time):
with setlocale('pt_BR'):
return date_time.strftime(BR_DATESHORT_FORMAT).decode('utf-8')
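# Hedged usage sketch: formatting falls back to the current locale if pt_BR is
# not installed.
#     from datetime import datetime
#     format_datetimeshort(datetime(2017, 3, 1, 14, 30))
#     # -> u'Qua, 01 mar - 14h30' under pt_BR (exact strings depend on the platform's locale data)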
| olavopeixoto/plugin.video.brplay | resources/lib/modules/kodi_util.py | Python | gpl-3.0 | 563 | 0.001776 |
#!/bin/env python
"""Hosts files from the local directory using SSL."""
from __future__ import print_function
import signal
import socket
import ssl
import subprocess
import sys
import threading
killed = False
# pylint: disable=C0411
if sys.version_info.major < 3:
import SimpleHTTPServer
import SocketServer
import urllib
Server = SocketServer.TCPServer
SimpleHTTPRequestHandler = SimpleHTTPServer.SimpleHTTPRequestHandler
urlopen = urllib.urlopen
decode = lambda s: s.decode('string_escape')
else:
from http.server import SimpleHTTPRequestHandler, HTTPServer # pylint: disable=E0401
Server = HTTPServer # pylint: disable=C0103
import urllib.request
urlopen = urllib.request.urlopen
decode = lambda s: bytes(s, 'utf-8').decode('unicode-escape')
class InterruptibleServer(Server):
def __init__(self, server_address, handler):
if sys.version_info.major < 3:
# Python 2's TCPServer is an old style class
Server.__init__(self, server_address, handler)
else:
super().__init__(server_address, handler)
def serve_until_shutdown(self):
global killed
while not killed:
self.handle_request()
class PostCommandsRequestHandler(SimpleHTTPRequestHandler): # pylint: disable=R0903
"""Serves files over GET and handles commands send over POST."""
def do_POST(self): # pylint: disable=C0103
"""Handles POST requests."""
if not self.path.endswith('/'):
# Redirect browser - doing basically what Apache does
self.send_response(301)
self.send_header('Location', self.path + '/')
self.end_headers()
elif self.path == '/command/':
# Forward this request on to the C server, because doing SSL in C
# sounds hard
content_length = int(self.headers.get('Content-Length'))
post_data = self.rfile.read(content_length)
print(post_data)
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('localhost', 12345))
sock.sendall(post_data)
except Exception as exc:
print('{}, sending 500'.format(exc))
self.send_response(500)
self.send_header('Content-type', 'text/plain; charset=utf-8')
self.end_headers()
# Firefox keeps expecting to get XML back. If we send back
# plain text, it doesn't error out, but it generates a console
# warning, so let's just play nice
                self.wfile.write('<p>Unable to contact pi_pcm; is it running?</p>'.encode('utf-8'))
return
finally:
sock.close()
self.send_response(200)
self.end_headers()
elif self.path == '/save/':
content_length = int(self.headers.get('Content-Length'))
post_data = decode(self.rfile.read(content_length))
with open('parameters.json', 'w') as parameters_file:
parameters_file.write(post_data)
self.send_response(200)
self.end_headers()
else:
self.send_response(404)
self.end_headers()
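# Hedged usage sketch: with the servers running, a control message POSTed to
# /command/ is forwarded verbatim to pi_pcm on localhost:12345. The JSON body
# below is illustrative only, not a documented pi_pcm schema.
#
#     curl -k -X POST https://localhost:4443/command/ -d '{"frequency": 49.830}'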
def kill_servers(*_):
global killed
killed = True
def main():
"""Main."""
signal.signal(signal.SIGINT, kill_servers)
# The URL fetching stuff inherits this timeout
socket.setdefaulttimeout(0.25)
# Prevent "address already in use" errors
Server.allow_reuse_address = True
base_cert_file_name = 'www.pi-rc.com'
try:
with open(base_cert_file_name + '.cert'):
pass
except IOError:
print(
'''Chrome requires HTTPS to access the webcam. This script can serve HTTPS
requests, but requires that a self-signed certificate be generated first. When
you access this page, you will get a warning - just click through it. This
script will now generate a self-signed certificate.'''
)
subprocess.call((
'openssl',
'req',
'-new',
'-newkey',
'rsa:4096',
'-days',
'365',
'-nodes',
'-x509',
'-subj',
'/C=US/ST=Denial/L=Springfield/O=Dis/CN={}'.format(base_cert_file_name),
'-keyout',
'{}.key'.format(base_cert_file_name),
'-out',
'{}.cert'.format(base_cert_file_name)
))
print('Starting servers')
secure_port = 4443
server_address = ('0.0.0.0', secure_port)
secure_httpd = InterruptibleServer(server_address, PostCommandsRequestHandler)
secure_httpd.socket = ssl.wrap_socket(
secure_httpd.socket,
server_side=True,
certfile='{}.cert'.format(base_cert_file_name),
keyfile='{}.key'.format(base_cert_file_name),
ssl_version=ssl.PROTOCOL_TLSv1
)
insecure_port = 8080
server_address = ('0.0.0.0', insecure_port)
insecure_httpd = InterruptibleServer(server_address, PostCommandsRequestHandler)
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
skari_org = '149.154.158.78'
# This won't actually make a connection
sock.connect((skari_org, 1))
ip = sock.getsockname()[0]
except socket.gaierror:
ip = 'localhost'
finally:
sock.close()
print(
'Running server on https://{ip}:{secure_port}/ and http://{ip}:{insecure_port}/'.format(
ip=ip,
secure_port=secure_port,
insecure_port=insecure_port
)
)
secure_thread = threading.Thread(target=lambda: secure_httpd.serve_until_shutdown())
secure_thread.start()
insecure_httpd.serve_until_shutdown()
if __name__ == '__main__':
main()
| bskari/pi-rc | host_files.py | Python | gpl-2.0 | 5,869 | 0.002045 |
# -*- coding: utf-8 -*-
# @copyright (C) 2014-2015
# Developers: BARDOU AUGUSTIN - BREZILLON ANTOINE - EUZEN DAVID - FRANCOIS SEBASTIEN - JOUNEAU NICOLAS - KIBEYA AISHA - LE CONG SEBASTIEN -
# MAGREZ VALENTIN - NGASSAM NOUMI PAOLA JOVANY - OUHAMMOUCH SALMA - RIAND MORGAN - TREIMOLEIRO ALEX - TRULLA AURELIEN '
# @license https://www.gnu.org/licenses/gpl-3.0.html GPL version 3
from models import *
from django.contrib.auth.models import User as django_User
from datetime import datetime
from django import forms
from django.contrib.gis.geos import Point
class LoginForm(forms.ModelForm):
class Meta:
model = User
widgets = {
'mail': forms.EmailInput(attrs={'aria-invalid': 'true', 'pattern': 'email', 'required': 'required'}),
}
exclude = ['name', 'firstname', 'sex', 'city', 'zipCode', 'phone', 'idHomeAddress', 'idWorkAddress']
class EmailAuthBackend(object):
def authenticate(self,username=None, password=None):
try:
user = django_User.objects.get(email=username)
if user and check_password(password, user.password):
return user
except django_User.DoesNotExist:
return None
def authenticate2(self,username=None, password=None):
try:
user = Provider.objects.filter(idUser__mail__contains=username).first()
if user and (check_password(password, user.password)):
return user
except User.DoesNotExist:
return None
def auth_email(self, username=None):
try:
user = Provider.objects.filter(idUser__mail__contains=username).first()
if user:
return user
except User.DoesNotExist:
return None
def auth_email2(self, username=None):
try:
user = django_User.objects.get(email=username)
if user:
return user
except User.DoesNotExist:
return None
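# Hedged note: for Django to pick up this backend it must be listed in
# AUTHENTICATION_BACKENDS in settings; the dotted path below is an assumption
# based on this file's location.
#     AUTHENTICATION_BACKENDS = ('ehopSolidaire_providers_register.forms.EmailAuthBackend', ...)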
class ContactForm(forms.Form):
firstname = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'required': 'required'}))
lastname = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'required': 'required'}))
phone = forms.CharField(widget=forms.TextInput(
attrs={'maxlength': '10', 'aria-invalid': 'true', 'pattern': 'phone', 'required': 'required'}))
sender = forms.EmailField(widget=forms.EmailInput(attrs={'aria-invalid': 'false', 'pattern': 'email'}), required=False)
subjectCHOICES = (('Demandeur','Je cherche un trajet'),('Offreur','Je souhaite proposer un trajet'),
('Infos','Informations diverses'),('Autre','Autre'))
subject = forms.ChoiceField(choices=subjectCHOICES)
goalOfApplicationCHOICES = [('', '')] + list(MenusSettings.objects.filter(type="goalOfApplication").values_list('string', 'string'))
goalOfApplication = forms.ChoiceField(widget=forms.Select(attrs={'required':'required'}), choices=goalOfApplicationCHOICES, required=False)
yearOfBirthCHOICES = (tuple((str(n), str(n)) for n in range(1900, datetime.now().year - 15))+(('',''),))[::-1]
yearOfBirth = forms.ChoiceField(widget=forms.Select(attrs={'required':'required'}), choices=yearOfBirthCHOICES, required=False)
message = forms.CharField(widget=forms.Textarea(attrs={'required': 'required'}))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
self.fields['goalOfApplication'].choices = get_menus_settings('goalOfApplication')
def get_menus_settings(type, required=True):
if required:
return [('', '')] + list(MenusSettings.objects.filter(type=type).values_list('string', 'string'))
else:
return list(MenusSettings.objects.filter(type=type).values_list('string', 'string'))
class UserRegisterForm(forms.ModelForm):
class Meta:
model = User
widgets = {
'name': forms.TextInput(attrs={'required': 'required'}),
'firstname': forms.TextInput(attrs={'required': 'required'}),
'sex': forms.RadioSelect(attrs={'required': 'required'}),
'city': forms.TextInput(attrs={'required': 'required'}),
'zipCode': forms.TextInput(attrs={'maxlength': '5', 'aria-invalid': 'true', 'pattern': 'zipCode',
'required': 'required'}),
'mail': forms.EmailInput(attrs={'aria-invalid': 'true', 'pattern': 'email', 'required': 'required'}),
'phone': forms.TextInput(attrs={'maxlength': '10', 'aria-invalid': 'true',
'pattern': 'phone', 'required': 'required'}),
}
exclude = ['idHomeAddress', 'idWorkAddress']
class ProviderRegisterForm(forms.ModelForm):
class Meta:
model = Provider
howKnowledgeCHOICES = get_menus_settings('howKnowledge')
widgets = {
'password': forms.PasswordInput(attrs={'id': 'password', 'required': 'required'}),
'company': forms.TextInput(attrs={'list':'datalistCompany', 'autocomplete':'off'}),
'howKnowledge': forms.Select(attrs={'required':'required'}, choices=howKnowledgeCHOICES)
}
exclude = ['idUser', 'is_active', 'last_login']
def __init__(self, *args, **kwargs):
super(ProviderRegisterForm, self).__init__(*args, **kwargs)
self.fields['howKnowledge'].choices = get_menus_settings('howKnowledge')
class ProviderForm2(forms.ModelForm):
class Meta:
model = Provider
howKnowledgeCHOICES = [('','')] + list(MenusSettings.objects.filter(type="howKnowledge").values_list('string', 'string'))
widgets = {
'company': forms.TextInput(attrs={'list': 'datalistCompany', 'autocomplete': 'off'}),
'howKnowledge': forms.Select(attrs={'required': 'required'}, choices=howKnowledgeCHOICES)
}
exclude = ['idUser', 'is_active', 'last_login', 'password']
def __init__(self, *args, **kwargs):
super(ProviderForm2, self).__init__(*args, **kwargs)
self.fields['howKnowledge'].choices = get_menus_settings('howKnowledge')
class AddressRegisterForm(forms.ModelForm):
latlng = forms.CharField(widget=forms.HiddenInput(), required=False,)
cityHide = forms.CharField(widget=forms.HiddenInput(), required=False,)
zipCodeHide = forms.CharField(widget=forms.HiddenInput(), required=False,)
class Meta:
model = Address
widgets = {
'street':forms.TextInput(attrs={'class': 'field', 'placeholder': 'Indiquez un lieu',
'autocomplete': 'on', 'required': 'required'}),
}
exclude = ['idAddress', 'point', 'city', 'zipCode']
def clean(self):
cleaned_data = super(AddressRegisterForm, self).clean()
coord = cleaned_data['latlng'].replace('(', '')
city = cleaned_data['cityHide']
zipcode = cleaned_data['zipCodeHide']
if city == "":
city = "undefined"
if zipcode == "undefined" or zipcode == "":
zipcode = 0
if coord == "" or coord == "undefined":
raise forms.ValidationError("Bad address")
coord = coord.replace(')', '')
coordTab = coord.split(',')
cleaned_data['point'] = 'POINT(%f %f)' % (float(coordTab[0]), float(coordTab[1]))
cleaned_data['city'] = city
cleaned_data['zipCode'] = zipcode
return cleaned_data
class AddressRegisterFormWork(forms.ModelForm):
latlng = forms.CharField(widget=forms.HiddenInput(), required=False,)
cityHide = forms.CharField(widget=forms.HiddenInput(), required=False,)
zipCodeHide = forms.CharField(widget=forms.HiddenInput(), required=False,)
class Meta:
model = Address
widgets = {
'street': forms.TextInput(attrs={'class': 'field', 'placeholder': 'Indiquez un lieu', 'autocomplete': 'on',
'required': 'required'}),
}
exclude = ['idAddress', 'point', 'city', 'zipCode']
def clean(self):
cleaned_data = super(AddressRegisterFormWork, self).clean()
coord = cleaned_data['latlng'].replace('(', '')
city = cleaned_data['cityHide']
zipcode = cleaned_data['zipCodeHide']
if city == "":
city = "undefined"
if zipcode == "undefined" or zipcode == "":
zipcode = 0
if coord == "" or coord == "undefined":
raise forms.ValidationError("Bad address")
coord = coord.replace(')', '')
coordtab = coord.split(',')
cleaned_data['point'] = 'POINT(%f %f)' % (float(coordtab[0]), float(coordtab[1]))
cleaned_data['city'] = city
        cleaned_data['zipCode'] = zipcode
return cleaned_data
class PathDepartureRegisterForm(forms.ModelForm):
class Meta:
model = Path
widgets = {
'type': forms.HiddenInput(),
'day': forms.HiddenInput(),
'weekNumber': forms.HiddenInput(),
'schedule': forms.TimeInput(attrs={'class': 'time', 'data-format': 'HH:mm', 'data-template': 'HH : mm',
'value': '08:00'}),
}
exclude = ['idPath', 'idProvider', 'departure', 'arrival', 'startingWeek']
class PathArrivalRegisterForm(forms.ModelForm):
class Meta:
model = Path
widgets = {
'type': forms.HiddenInput(),
'day': forms.HiddenInput(),
'weekNumber': forms.HiddenInput(),
'schedule': forms.TimeInput(attrs={'class': 'time', 'data-format': 'HH:mm', 'data-template': 'HH : mm',
'value':'18:00'}),
}
exclude = ['idPath', 'idProvider', 'departure', 'arrival', 'startingWeek']
class TestUserRegisterForm(forms.ModelForm):
class Meta:
model = User
widgets = {
'name': forms.TextInput(attrs={'required': 'required'}),
'firstname': forms.TextInput(attrs={'required': 'required'}),
'city': forms.TextInput(attrs={'required': 'required'}),
'zipCode': forms.TextInput(attrs={'maxlength': '5', 'aria-invalid': 'true', 'pattern': 'zipCode', 'required': 'required'}),
'mail': forms.EmailInput(attrs={'aria-invalid': 'true', 'pattern': 'email', 'required': 'required'}),
'phone': forms.TextInput(attrs={'maxlength': '10', 'aria-invalid': 'true', 'pattern': 'phone', 'required': 'required'}),
}
exclude = ['idHomeAddress', 'idWorkAddress', 'sex']
class newMdpForm(forms.Form):
oldmdp = forms.CharField(widget=forms.PasswordInput(), label='Ancien mot de passe', required=True)
newmdp1 = forms.CharField(widget=forms.PasswordInput(), label='Nouveau mot de passe', required=True) | ehopsolidaires/ehop-solidaires.fr | ehop/ehopSolidaire_providers_register/forms.py | Python | agpl-3.0 | 10,906 | 0.006877 |
#!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2015, Kovid Goyal <kovid at kovidgoyal.net>'
import errno, socket, select, os
from Cookie import SimpleCookie
from contextlib import closing
from urlparse import parse_qs
import repr as reprlib
from email.utils import formatdate
from operator import itemgetter
from future_builtins import map
from urllib import quote as urlquote
from binascii import hexlify, unhexlify
from calibre import prints
from calibre.constants import iswindows
from calibre.utils.config_base import tweaks
from calibre.utils.localization import get_translator
from calibre.utils.socket_inheritance import set_socket_inherit
from calibre.utils.logging import ThreadSafeLog
from calibre.utils.shared_file import share_open, raise_winerror
HTTP1 = 'HTTP/1.0'
HTTP11 = 'HTTP/1.1'
DESIRED_SEND_BUFFER_SIZE = 16 * 1024 # windows 7 uses an 8KB sndbuf
def http_date(timeval=None):
return type('')(formatdate(timeval=timeval, usegmt=True))
class MultiDict(dict): # {{{
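    # A dict that keeps every value set for a key: plain indexing returns the
    # most recently added value, while get(all=True) and items(duplicates=True)
    # expose all of them. Illustrative example:
    #   d = MultiDict(); d['a'] = 1; d['a'] = 2
    #   d['a'] -> 2, d.get('a', all=True) -> [1, 2]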
def __setitem__(self, key, val):
vals = dict.get(self, key, [])
vals.append(val)
dict.__setitem__(self, key, vals)
def __getitem__(self, key):
return dict.__getitem__(self, key)[-1]
@staticmethod
def create_from_query_string(qs):
ans = MultiDict()
for k, v in parse_qs(qs, keep_blank_values=True).iteritems():
dict.__setitem__(ans, k.decode('utf-8'), [x.decode('utf-8') for x in v])
return ans
def update_from_listdict(self, ld):
for key, values in ld.iteritems():
for val in values:
self[key] = val
def items(self, duplicates=True):
for k, v in dict.iteritems(self):
if duplicates:
for x in v:
yield k, x
else:
yield k, v[-1]
iteritems = items
def values(self, duplicates=True):
for v in dict.itervalues(self):
if duplicates:
for x in v:
yield x
else:
yield v[-1]
itervalues = values
def set(self, key, val, replace_all=False):
if replace_all:
dict.__setitem__(self, key, [val])
else:
self[key] = val
def get(self, key, default=None, all=False):
if all:
try:
return dict.__getitem__(self, key)
except KeyError:
return []
try:
return self.__getitem__(key)
except KeyError:
return default
def pop(self, key, default=None, all=False):
ans = dict.pop(self, key, default)
if ans is default:
return [] if all else default
return ans if all else ans[-1]
def __repr__(self):
return '{' + ', '.join('%s: %s' % (reprlib.repr(k), reprlib.repr(v)) for k, v in self.iteritems()) + '}'
__str__ = __unicode__ = __repr__
def pretty(self, leading_whitespace=''):
return leading_whitespace + ('\n' + leading_whitespace).join(
'%s: %s' % (k, (repr(v) if isinstance(v, bytes) else v)) for k, v in sorted(self.items(), key=itemgetter(0)))
# }}}
def error_codes(*errnames):
''' Return error numbers for error names, ignoring non-existent names '''
ans = {getattr(errno, x, None) for x in errnames}
ans.discard(None)
return ans
socket_errors_eintr = error_codes("EINTR", "WSAEINTR")
socket_errors_socket_closed = error_codes( # errors indicating a disconnected connection
"EPIPE",
"EBADF", "WSAEBADF",
"ENOTSOCK", "WSAENOTSOCK",
"ENOTCONN", "WSAENOTCONN",
"ESHUTDOWN", "WSAESHUTDOWN",
"ETIMEDOUT", "WSAETIMEDOUT",
"ECONNREFUSED", "WSAECONNREFUSED",
"ECONNRESET", "WSAECONNRESET",
"ECONNABORTED", "WSAECONNABORTED",
"ENETRESET", "WSAENETRESET",
"EHOSTDOWN", "EHOSTUNREACH",
)
socket_errors_nonblocking = error_codes(
'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
def start_cork(sock):
if hasattr(socket, 'TCP_CORK'):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1)
def stop_cork(sock):
if hasattr(socket, 'TCP_CORK'):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 0)
def create_sock_pair(port=0):
'''Create socket pair. Works also on windows by using an ephemeral TCP port.'''
if hasattr(socket, 'socketpair'):
client_sock, srv_sock = socket.socketpair()
set_socket_inherit(client_sock, False), set_socket_inherit(srv_sock, False)
return client_sock, srv_sock
# Create a non-blocking temporary server socket
temp_srv_sock = socket.socket()
set_socket_inherit(temp_srv_sock, False)
temp_srv_sock.setblocking(False)
temp_srv_sock.bind(('127.0.0.1', port))
port = temp_srv_sock.getsockname()[1]
temp_srv_sock.listen(1)
with closing(temp_srv_sock):
# Create non-blocking client socket
client_sock = socket.socket()
client_sock.setblocking(False)
set_socket_inherit(client_sock, False)
try:
client_sock.connect(('127.0.0.1', port))
except socket.error as err:
# EWOULDBLOCK is not an error, as the socket is non-blocking
if err.errno not in socket_errors_nonblocking:
raise
# Use select to wait for connect() to succeed.
timeout = 1
readable = select.select([temp_srv_sock], [], [], timeout)[0]
if temp_srv_sock not in readable:
raise Exception('Client socket not connected in {} second(s)'.format(timeout))
srv_sock = temp_srv_sock.accept()[0]
set_socket_inherit(srv_sock, False)
client_sock.setblocking(True)
return client_sock, srv_sock
def parse_http_list(header_val):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Neither commas nor quotes count if they are escaped.
Only double-quotes count, not single-quotes.
"""
if isinstance(header_val, bytes):
slash, dquote, comma = b'\\",'
empty = b''
else:
slash, dquote, comma = '\\",'
empty = ''
part = empty
escape = quote = False
for cur in header_val:
if escape:
part += cur
escape = False
continue
if quote:
if cur == slash:
escape = True
continue
elif cur == dquote:
quote = False
part += cur
continue
if cur == comma:
yield part.strip()
part = empty
continue
if cur == dquote:
quote = True
part += cur
if part:
yield part.strip()
def parse_http_dict(header_val):
'Parse an HTTP comma separated header with items of the form a=1, b="xxx" into a dictionary'
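    # Example: 'a=1, b="xxx"' -> {'a': '1', 'b': 'xxx'} (surrounding double quotes are stripped from values).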
if not header_val:
return {}
ans = {}
sep, dquote = b'="' if isinstance(header_val, bytes) else '="'
for item in parse_http_list(header_val):
k, v = item.partition(sep)[::2]
if k:
if v.startswith(dquote) and v.endswith(dquote):
v = v[1:-1]
ans[k] = v
return ans
def sort_q_values(header_val):
'Get sorted items from an HTTP header of type: a;q=0.5, b;q=0.7...'
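    # Example: 'a;q=0.5, b;q=0.7' -> ('b', 'a'); items are ordered by descending quality value, with q defaulting to 1.0.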
if not header_val:
return []
def item(x):
e, r = x.partition(';')[::2]
p, v = r.partition('=')[::2]
q = 1.0
if p == 'q' and v:
try:
q = max(0.0, min(1.0, float(v.strip())))
except Exception:
pass
return e.strip(), q
return tuple(map(itemgetter(0), sorted(map(item, parse_http_list(header_val)), key=itemgetter(1), reverse=True)))
def eintr_retry_call(func, *args, **kwargs):
while True:
try:
return func(*args, **kwargs)
except EnvironmentError as e:
if getattr(e, 'errno', None) in socket_errors_eintr:
continue
raise
def get_translator_for_lang(cache, bcp_47_code):
try:
return cache[bcp_47_code]
except KeyError:
pass
cache[bcp_47_code] = ans = get_translator(bcp_47_code)
return ans
def encode_path(*components):
'Encode the path specified as a list of path components using URL encoding'
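    # Example: encode_path('a b', 'c') -> '/a%20b/c'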
return '/' + '/'.join(urlquote(x.encode('utf-8'), '').decode('ascii') for x in components)
def encode_name(name):
'Encode a name (arbitrary string) as URL safe characters. See decode_name() also.'
if isinstance(name, unicode):
name = name.encode('utf-8')
return hexlify(name)
def decode_name(name):
return unhexlify(name).decode('utf-8')
class Cookie(SimpleCookie):
def _BaseCookie__set(self, key, real_value, coded_value):
if not isinstance(key, bytes):
key = key.encode('ascii') # Python 2.x cannot handle unicode keys
return SimpleCookie._BaseCookie__set(self, key, real_value, coded_value)
def custom_fields_to_display(db):
ckeys = set(db.field_metadata.ignorable_field_keys())
yes_fields = set(tweaks['content_server_will_display'])
no_fields = set(tweaks['content_server_wont_display'])
if '*' in yes_fields:
yes_fields = ckeys
if '*' in no_fields:
no_fields = ckeys
return frozenset(ckeys & (yes_fields - no_fields))
# Logging {{{
class ServerLog(ThreadSafeLog):
exception_traceback_level = ThreadSafeLog.WARN
class RotatingStream(object):
def __init__(self, filename, max_size=None, history=5):
self.filename, self.history, self.max_size = filename, history, max_size
if iswindows:
self.filename = '\\\\?\\' + os.path.abspath(self.filename)
self.set_output()
def set_output(self):
self.stream = share_open(self.filename, 'ab', -1 if iswindows else 1) # line buffered
try:
self.current_pos = self.stream.tell()
except EnvironmentError:
# Happens if filename is /dev/stdout for example
self.current_pos = 0
self.max_size = None
def flush(self):
self.stream.flush()
def prints(self, level, *args, **kwargs):
kwargs['safe_encode'] = True
kwargs['file'] = self.stream
self.current_pos += prints(*args, **kwargs)
if iswindows:
# For some reason line buffering does not work on windows
end = kwargs.get('end', b'\n')
if b'\n' in end:
self.flush()
self.rollover()
def rename(self, src, dest):
try:
if iswindows:
import win32file, pywintypes
try:
win32file.MoveFileEx(src, dest, win32file.MOVEFILE_REPLACE_EXISTING|win32file.MOVEFILE_WRITE_THROUGH)
except pywintypes.error as e:
raise_winerror(e)
else:
os.rename(src, dest)
except EnvironmentError as e:
if e.errno != errno.ENOENT: # the source of the rename does not exist
raise
def rollover(self):
if self.max_size is None or self.current_pos <= self.max_size:
return
self.stream.close()
for i in xrange(self.history - 1, 0, -1):
src, dest = '%s.%d' % (self.filename, i), '%s.%d' % (self.filename, i+1)
self.rename(src, dest)
self.rename(self.filename, '%s.%d' % (self.filename, 1))
self.set_output()
class RotatingLog(ServerLog):
def __init__(self, filename, max_size=None, history=5):
ServerLog.__init__(self)
self.outputs = [RotatingStream(filename, max_size, history)]
def flush(self):
for o in self.outputs:
o.flush()
# }}}
class HandleInterrupt(object): # {{{
# On windows socket functions like accept(), recv(), send() are not
# interrupted by a Ctrl-C in the console. So to make Ctrl-C work we have to
# use this special context manager. See the echo server example at the
# bottom of this file for how to use it.
def __init__(self, action):
if not iswindows:
return # Interrupts work fine on POSIX
self.action = action
from ctypes import WINFUNCTYPE, windll
from ctypes.wintypes import BOOL, DWORD
kernel32 = windll.LoadLibrary('kernel32')
# <http://msdn.microsoft.com/en-us/library/ms686016.aspx>
PHANDLER_ROUTINE = WINFUNCTYPE(BOOL, DWORD)
self.SetConsoleCtrlHandler = kernel32.SetConsoleCtrlHandler
self.SetConsoleCtrlHandler.argtypes = (PHANDLER_ROUTINE, BOOL)
self.SetConsoleCtrlHandler.restype = BOOL
@PHANDLER_ROUTINE
def handle(event):
if event == 0: # CTRL_C_EVENT
if self.action is not None:
self.action()
self.action = None
# Typical C implementations would return 1 to indicate that
# the event was processed and other control handlers in the
# stack should not be executed. However, that would
# prevent the Python interpreter's handler from translating
# CTRL-C to a `KeyboardInterrupt` exception, so we pretend
# that we didn't handle it.
return 0
self.handle = handle
def __enter__(self):
if iswindows:
if self.SetConsoleCtrlHandler(self.handle, 1) == 0:
raise WindowsError()
def __exit__(self, *args):
if iswindows:
if self.SetConsoleCtrlHandler(self.handle, 0) == 0:
raise WindowsError()
# }}}
class Accumulator(object): # {{{
'Optimized replacement for BytesIO when the usage pattern is many writes followed by a single getvalue()'
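    # append() only collects chunks; getvalue() joins them once and resets the accumulator.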
def __init__(self):
self._buf = []
self.total_length = 0
def append(self, b):
self._buf.append(b)
self.total_length += len(b)
def getvalue(self):
ans = b''.join(self._buf)
self._buf = []
self.total_length = 0
return ans
# }}}
class ReadOnlyFileBuffer(object):
''' A zero copy implementation of a file like object. Uses memoryviews for efficiency. '''
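    # read() returns memoryview slices instead of copies; seek()/tell() behave like a regular binary file opened for reading.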
def __init__(self, raw):
self.sz, self.mv = len(raw), (raw if isinstance(raw, memoryview) else memoryview(raw))
self.pos = 0
def tell(self):
return self.pos
def read(self, n=None):
if n is None:
ans = self.mv[self.pos:]
self.pos = self.sz
return ans
ans = self.mv[self.pos:self.pos+n]
self.pos = min(self.pos + n, self.sz)
return ans
def seek(self, pos, whence=os.SEEK_SET):
if whence == os.SEEK_SET:
self.pos = pos
elif whence == os.SEEK_END:
self.pos = self.sz + pos
else:
self.pos += pos
self.pos = max(0, min(self.pos, self.sz))
return self.pos
def getvalue(self):
return self.mv
def close(self):
pass
| hazrpg/calibre | src/calibre/srv/utils.py | Python | gpl-3.0 | 15,349 | 0.003192 |
# (C) British Crown Copyright 2010 - 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
# import iris tests first so that some things can be initialised before importing anything else
import iris.tests as tests
import unittest
import numpy as np
import iris
import iris.analysis.calculus
import iris.cube
import iris.coord_systems
import iris.coords
import iris.tests.stock
from iris.coords import DimCoord
from iris.tests.test_interpolation import normalise_order
class TestCubeDelta(tests.IrisTest):
def test_invalid(self):
cube = iris.tests.stock.realistic_4d()
with self.assertRaises(iris.exceptions.CoordinateMultiDimError):
t = iris.analysis.calculus.cube_delta(cube, 'surface_altitude')
with self.assertRaises(iris.exceptions.CoordinateMultiDimError):
t = iris.analysis.calculus.cube_delta(cube, 'altitude')
with self.assertRaises(ValueError):
t = iris.analysis.calculus.cube_delta(cube, 'forecast_period')
def test_delta_coord_lookup(self):
cube = iris.cube.Cube(np.arange(10), standard_name='air_temperature')
# Add a coordinate with a lot of metadata.
coord = iris.coords.DimCoord(np.arange(10),
long_name='projection_x_coordinate',
var_name='foo',
attributes={'source': 'testing'},
units='m',
coord_system=iris.coord_systems.OSGB())
cube.add_dim_coord(coord, 0)
delta = iris.analysis.calculus.cube_delta(cube,
'projection_x_coordinate')
delta_coord = delta.coord('projection_x_coordinate')
self.assertEqual(delta_coord, delta.coord(coord))
self.assertEqual(coord, cube.coord(delta_coord))
class TestDeltaAndMidpoint(tests.IrisTest):
def _simple_filename(self, suffix):
return tests.get_result_path(('analysis', 'delta_and_midpoint', 'simple%s.cml' % suffix))
def test_simple1_delta_midpoint(self):
a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * 90) - 180, long_name='foo',
units='degrees', circular=True)
self.assertXMLElement(a, self._simple_filename('1'))
delta = iris.analysis.calculus._construct_delta_coord(a)
self.assertXMLElement(delta, self._simple_filename('1_delta'))
midpoint = iris.analysis.calculus._construct_midpoint_coord(a)
self.assertXMLElement(midpoint, self._simple_filename('1_midpoint'))
def test_simple2_delta_midpoint(self):
a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * -90) + 180, long_name='foo',
units='degrees', circular=True)
self.assertXMLElement(a, self._simple_filename('2'))
delta = iris.analysis.calculus._construct_delta_coord(a)
self.assertXMLElement(delta, self._simple_filename('2_delta'))
midpoint = iris.analysis.calculus._construct_midpoint_coord(a)
self.assertXMLElement(midpoint, self._simple_filename('2_midpoint'))
def test_simple3_delta_midpoint(self):
a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * 90) - 180, long_name='foo',
units='degrees', circular=True)
a.guess_bounds(0.5)
self.assertXMLElement(a, self._simple_filename('3'))
delta = iris.analysis.calculus._construct_delta_coord(a)
self.assertXMLElement(delta, self._simple_filename('3_delta'))
midpoint = iris.analysis.calculus._construct_midpoint_coord(a)
self.assertXMLElement(midpoint, self._simple_filename('3_midpoint'))
def test_simple4_delta_midpoint(self):
a = iris.coords.AuxCoord(np.arange(4, dtype=np.float32) * 90 - 180, long_name='foo', units='degrees')
a.guess_bounds()
b = a.copy()
self.assertXMLElement(b, self._simple_filename('4'))
delta = iris.analysis.calculus._construct_delta_coord(b)
self.assertXMLElement(delta, self._simple_filename('4_delta'))
midpoint = iris.analysis.calculus._construct_midpoint_coord(b)
self.assertXMLElement(midpoint, self._simple_filename('4_midpoint'))
def test_simple5_not_degrees_delta_midpoint(self):
# Not sure it makes sense to have a circular coordinate which does not have a modulus but test it anyway.
a = iris.coords.DimCoord(np.arange(4, dtype=np.float32) * 90 - 180,
long_name='foo', units='meter', circular=True)
self.assertXMLElement(a, self._simple_filename('5'))
delta = iris.analysis.calculus._construct_delta_coord(a)
self.assertXMLElement(delta, self._simple_filename('5_delta'))
midpoints = iris.analysis.calculus._construct_midpoint_coord(a)
self.assertXMLElement(midpoints, self._simple_filename('5_midpoint'))
def test_simple6_delta_midpoint(self):
a = iris.coords.DimCoord(np.arange(5, dtype=np.float32), long_name='foo',
units='count', circular=True)
midpoints = iris.analysis.calculus._construct_midpoint_coord(a)
self.assertXMLElement(midpoints, self._simple_filename('6'))
def test_singular_delta(self):
# Test single valued coordinate mid-points when circular
lon = iris.coords.DimCoord(np.float32(-180.), 'latitude', units='degrees', circular=True)
r_expl = iris.analysis.calculus._construct_delta_coord(lon)
self.assertXMLElement(r_expl, ('analysis', 'delta_and_midpoint', 'delta_one_element_explicit.xml'))
# Test single valued coordinate mid-points when not circular
lon.circular = False
with self.assertRaises(ValueError):
iris.analysis.calculus._construct_delta_coord(lon)
def test_singular_midpoint(self):
# Test single valued coordinate mid-points when circular
lon = iris.coords.DimCoord(np.float32(-180.), 'latitude', units='degrees', circular=True)
r_expl = iris.analysis.calculus._construct_midpoint_coord(lon)
self.assertXMLElement(r_expl, ('analysis', 'delta_and_midpoint', 'midpoint_one_element_explicit.xml'))
# Test single valued coordinate mid-points when not circular
lon.circular = False
with self.assertRaises(ValueError):
iris.analysis.calculus._construct_midpoint_coord(lon)
class TestCoordTrig(tests.IrisTest):
def setUp(self):
points = np.arange(20, dtype=np.float32) * 2.3
bounds = np.concatenate([[points - 0.5 * 2.3],
[points + 0.5 * 2.3]]).T
self.lat = iris.coords.AuxCoord(points, 'latitude', units='degrees', bounds=bounds)
self.rlat = iris.coords.AuxCoord(np.deg2rad(points), 'latitude', units='radians', bounds=np.deg2rad(bounds))
def test_sin(self):
sin_of_coord = iris.analysis.calculus._coord_sin(self.lat)
sin_of_coord_radians = iris.analysis.calculus._coord_sin(self.rlat)
# Check the values are correct (within a tolerance)
np.testing.assert_array_almost_equal(np.sin(self.rlat.points), sin_of_coord.points)
np.testing.assert_array_almost_equal(np.sin(self.rlat.bounds), sin_of_coord.bounds)
# Check that the results of the sin function are almost equal when operating on a coord with degrees and radians
np.testing.assert_array_almost_equal(sin_of_coord.points, sin_of_coord_radians.points)
np.testing.assert_array_almost_equal(sin_of_coord.bounds, sin_of_coord_radians.bounds)
self.assertEqual(sin_of_coord.name(), 'sin(latitude)')
self.assertEqual(sin_of_coord.units, '1')
def test_cos(self):
cos_of_coord = iris.analysis.calculus._coord_cos(self.lat)
cos_of_coord_radians = iris.analysis.calculus._coord_cos(self.rlat)
# Check the values are correct (within a tolerance)
np.testing.assert_array_almost_equal(np.cos(self.rlat.points), cos_of_coord.points)
np.testing.assert_array_almost_equal(np.cos(self.rlat.bounds), cos_of_coord.bounds)
# Check that the results of the cos function are almost equal when operating on a coord with degrees and radians
np.testing.assert_array_almost_equal(cos_of_coord.points, cos_of_coord_radians.points)
np.testing.assert_array_almost_equal(cos_of_coord.bounds, cos_of_coord_radians.bounds)
# Now that we have tested the points & bounds, remove them and just test the xml
cos_of_coord = cos_of_coord.copy(points=np.array([1], dtype=np.float32))
cos_of_coord_radians = cos_of_coord_radians.copy(points=np.array([1], dtype=np.float32))
self.assertXMLElement(cos_of_coord, ('analysis', 'calculus', 'cos_simple.xml'))
self.assertXMLElement(cos_of_coord_radians, ('analysis', 'calculus', 'cos_simple_radians.xml'))
class TestCalculusSimple3(tests.IrisTest):
def setUp(self):
data = np.arange(2500, dtype=np.float32).reshape(50, 50)
cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h")
self.lonlat_cs = iris.coord_systems.GeogCS(6371229)
cube.add_dim_coord(DimCoord(np.arange(50, dtype=np.float32) * 4.5 -180, 'longitude', units='degrees', coord_system=self.lonlat_cs), 0)
cube.add_dim_coord(DimCoord(np.arange(50, dtype=np.float32) * 4.5 -90, 'latitude', units='degrees', coord_system=self.lonlat_cs), 1)
self.cube = cube
def test_diff_wrt_lon(self):
t = iris.analysis.calculus.differentiate(self.cube, 'longitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade2_wrt_lon.cml'))
def test_diff_wrt_lat(self):
t = iris.analysis.calculus.differentiate(self.cube, 'latitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade2_wrt_lat.cml'))
class TestCalculusSimple2(tests.IrisTest):
def setUp(self):
data = np.array( [[1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7],
[4, 5, 6, 7, 9]], dtype=np.float32)
cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h")
self.lonlat_cs = iris.coord_systems.GeogCS(6371229)
cube.add_dim_coord(DimCoord(np.arange(4, dtype=np.float32) * 90 -180, 'longitude', units='degrees', circular=True, coord_system=self.lonlat_cs), 0)
cube.add_dim_coord(DimCoord(np.arange(5, dtype=np.float32) * 45 -90, 'latitude', units='degrees', coord_system=self.lonlat_cs), 1)
cube.add_aux_coord(DimCoord(np.arange(4, dtype=np.float32), long_name='x', units='count', circular=True), 0)
cube.add_aux_coord(DimCoord(np.arange(5, dtype=np.float32), long_name='y', units='count'), 1)
self.cube = cube
def test_diff_wrt_x(self):
t = iris.analysis.calculus.differentiate(self.cube, 'x')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade_wrt_x.cml'))
def test_diff_wrt_y(self):
t = iris.analysis.calculus.differentiate(self.cube, 'y')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade_wrt_y.cml'))
def test_diff_wrt_lon(self):
t = iris.analysis.calculus.differentiate(self.cube, 'longitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade_wrt_lon.cml'))
def test_diff_wrt_lat(self):
t = iris.analysis.calculus.differentiate(self.cube, 'latitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade_wrt_lat.cml'))
def test_delta_wrt_x(self):
t = iris.analysis.calculus.cube_delta(self.cube, 'x')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'delta_handmade_wrt_x.cml'))
def test_delta_wrt_y(self):
t = iris.analysis.calculus.cube_delta(self.cube, 'y')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'delta_handmade_wrt_y.cml'))
def test_delta_wrt_lon(self):
t = iris.analysis.calculus.cube_delta(self.cube, 'longitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'delta_handmade_wrt_lon.cml'))
def test_delta_wrt_lat(self):
t = iris.analysis.calculus.cube_delta(self.cube, 'latitude')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'delta_handmade_wrt_lat.cml'))
class TestCalculusSimple1(tests.IrisTest):
def setUp(self):
data = np.array( [ [1, 2, 3, 4, 5],
[2, 3, 4, 5, 6],
[3, 4, 5, 6, 7],
[4, 5, 6, 7, 8],
[5, 6, 7, 8, 10] ], dtype=np.float32)
cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h")
cube.add_dim_coord(DimCoord(np.arange(5, dtype=np.float32), long_name='x', units='count'), 0)
cube.add_dim_coord(DimCoord(np.arange(5, dtype=np.float32), long_name='y', units='count'), 1)
self.cube = cube
def test_diff_wrt_x(self):
t = iris.analysis.calculus.differentiate(self.cube, 'x')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'handmade_simple_wrt_x.cml'))
def test_delta_wrt_x(self):
t = iris.analysis.calculus.cube_delta(self.cube, 'x')
self.assertCMLApproxData(t, ('analysis', 'calculus', 'delta_handmade_simple_wrt_x.cml'))
def build_cube(data, spherical=False):
"""
Create a cube suitable for testing.
"""
cube = iris.cube.Cube(data, standard_name="x_wind", units="km/h")
nx = data.shape[-1]
ny = data.shape[-2]
nz = data.shape[-3] if data.ndim > 2 else None
dimx = data.ndim - 1
dimy = data.ndim - 2
dimz = data.ndim - 3 if data.ndim > 2 else None
if spherical:
hcs = iris.coord_systems.GeogCS(6321)
cube.add_dim_coord(DimCoord(np.arange(-180, 180, 360./nx, dtype=np.float32), 'longitude', units='degrees', coord_system=hcs, circular=True), dimx)
cube.add_dim_coord(DimCoord(np.arange(-90, 90, 180./ny, dtype=np.float32), 'latitude', units='degrees', coord_system=hcs), dimy)
else:
cube.add_dim_coord(DimCoord(np.arange(nx, dtype=np.float32) * 2.21 + 2, 'projection_x_coordinate', units='meters'), dimx)
cube.add_dim_coord(DimCoord(np.arange(ny, dtype=np.float32) * 25 -50, 'projection_y_coordinate', units='meters'), dimy)
if nz is None:
cube.add_aux_coord(DimCoord(np.array([10], dtype=np.float32), long_name='z', units='meters', attributes={"positive":"up"}))
else:
cube.add_dim_coord(DimCoord(np.arange(nz, dtype=np.float32) * 2, long_name='z', units='meters', attributes={"positive":"up"}), dimz)
return cube
class TestCalculusWKnownSolutions(tests.IrisTest):
def get_coord_pts(self, cube):
"""return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones) for the given cube."""
x = cube.coord(axis='X')
y = cube.coord(axis='Y')
z = cube.coord(axis='Z')
if z and z.shape[0] > 1:
x_shp = (1, 1, x.shape[0])
y_shp = (1, y.shape[0], 1)
z_shp = (z.shape[0], 1, 1)
else:
x_shp = (1, x.shape[0])
y_shp = (y.shape[0], 1)
z_shp = None
x_pts = x.points.reshape(x_shp)
y_pts = y.points.reshape(y_shp)
x_ones = np.ones(x_shp)
y_ones = np.ones(y_shp)
if z_shp:
z_pts = z.points.reshape(z_shp)
z_ones = np.ones(z_shp)
else:
z_pts = None
z_ones = None
return (x_pts, x_ones, y_pts, y_ones, z_pts, z_ones)
def test_contrived_differential1(self):
# testing :
# F = ( cos(lat) cos(lon) )
        # dF/dlon = -sin(lon) cos(lat); the result is divided by cos(lat) below to simplify the comparison
cube = build_cube(np.empty((30, 60)), spherical=True)
x = cube.coord('longitude')
y = cube.coord('latitude')
y_dim = cube.coord_dims(y)[0]
cos_x_pts = np.cos(np.radians(x.points)).reshape(1, x.shape[0])
cos_y_pts = np.cos(np.radians(y.points)).reshape(y.shape[0], 1)
cube.data = cos_y_pts * cos_x_pts
lon_coord = x.copy()
lon_coord.convert_units('radians')
lat_coord = y.copy()
lat_coord.convert_units('radians')
cos_lat_coord = iris.coords.AuxCoord.from_coord(lat_coord)
cos_lat_coord.points = np.cos(lat_coord.points)
cos_lat_coord.units = '1'
cos_lat_coord.rename('cos({})'.format(lat_coord.name()))
temp = iris.analysis.calculus.differentiate(cube, lon_coord)
df_dlon = iris.analysis.maths.divide(temp, cos_lat_coord, y_dim)
x = df_dlon.coord('longitude')
y = df_dlon.coord('latitude')
sin_x_pts = np.sin(np.radians(x.points)).reshape(1, x.shape[0])
y_ones = np.ones((y.shape[0], 1))
data = - sin_x_pts * y_ones
result = df_dlon.copy(data=data)
np.testing.assert_array_almost_equal(result.data, df_dlon.data, decimal=3)
def test_contrived_differential2(self):
# testing :
# w = y^2
# dw_dy = 2*y
cube = build_cube(np.empty((10, 30, 60)), spherical=False)
x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(cube)
w = cube.copy(data=z_ones * x_ones * pow(y_pts, 2.))
r = iris.analysis.calculus.differentiate(w, 'projection_y_coordinate')
x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(r)
result = r.copy(data = y_pts * 2. * x_ones * z_ones)
np.testing.assert_array_almost_equal(result.data, r.data, decimal=6)
def test_contrived_non_spherical_curl1(self):
# testing :
# F(x, y, z) = (y, 0, 0)
# curl( F(x, y, z) ) = (0, 0, -1)
cube = build_cube(np.empty((25, 50)), spherical=False)
x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(cube)
u = cube.copy(data=x_ones * y_pts)
u.rename("u_wind")
v = cube.copy(data=u.data * 0)
v.rename("v_wind")
r = iris.analysis.calculus.curl(u, v)
        # Curl returns None when there are no components of Curl
self.assertEqual(r[0], None)
self.assertEqual(r[1], None)
cube = r[2]
self.assertCML(
cube,
('analysis', 'calculus', 'grad_contrived_non_spherical1.cml'),
checksum=False)
self.assertTrue(np.all(np.abs(cube.data - (-1.0)) < 1.0e-7))
def test_contrived_non_spherical_curl2(self):
# testing :
# F(x, y, z) = (z^3, x+2, y^2)
# curl( F(x, y, z) ) = (2y, 3z^2, 1)
cube = build_cube(np.empty((10, 25, 50)), spherical=False)
x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(cube)
u = cube.copy(data=pow(z_pts, 3) * x_ones * y_ones)
v = cube.copy(data=z_ones * (x_pts + 2.) * y_ones)
w = cube.copy(data=z_ones * x_ones * pow(y_pts, 2.))
u.rename('u_wind')
v.rename('v_wind')
w.rename('w_wind')
r = iris.analysis.calculus.curl(u, v, w)
# TODO #235 When regridding is not nearest neighbour: the commented out code could be made to work
# r[0].data should now be tending towards result.data as the resolution of the grid gets higher.
# result = r[0].copy(data=True)
# x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(result)
# result.data = y_pts * 2. * x_ones * z_ones
# print(repr(r[0].data[0:1, 0:5, 0:25:5]))
# print(repr(result.data[0:1, 0:5, 0:25:5]))
# np.testing.assert_array_almost_equal(result.data, r[0].data, decimal=2)
#
# result = r[1].copy(data=True)
# x_pts, x_ones, y_pts, y_ones, z_pts, z_ones = self.get_coord_pts(result)
# result.data = pow(z_pts, 2) * x_ones * y_ones
# np.testing.assert_array_almost_equal(result.data, r[1].data, decimal=6)
result = r[2].copy()
result.data = result.data * 0 + 1
np.testing.assert_array_almost_equal(result.data, r[2].data, decimal=4)
normalise_order(r[1])
self.assertCML(r, ('analysis', 'calculus', 'curl_contrived_cartesian2.cml'), checksum=False)
def test_contrived_spherical_curl1(self):
# testing:
# F(lon, lat, r) = (- r sin(lon), -r cos(lon) sin(lat), 0)
# curl( F(x, y, z) ) = (0, 0, 0)
cube = build_cube(np.empty((30, 60)), spherical=True)
radius = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS
x = cube.coord('longitude')
y = cube.coord('latitude')
cos_x_pts = np.cos(np.radians(x.points)).reshape(1, x.shape[0])
sin_x_pts = np.sin(np.radians(x.points)).reshape(1, x.shape[0])
cos_y_pts = np.cos(np.radians(y.points)).reshape(y.shape[0], 1)
sin_y_pts = np.sin(np.radians(y.points)).reshape(y.shape[0], 1)
y_ones = np.ones((cube.shape[0], 1))
u = cube.copy(data=-sin_x_pts * y_ones * radius)
v = cube.copy(data=-cos_x_pts * sin_y_pts * radius)
u.rename('u_wind')
v.rename('v_wind')
r = iris.analysis.calculus.curl(u, v)[2]
result = r.copy(data=r.data * 0)
# Note: This numerical comparison was created when the radius was 1000 times smaller
np.testing.assert_array_almost_equal(result.data[5:-5], r.data[5:-5]/1000.0, decimal=1)
self.assertCML(r, ('analysis', 'calculus', 'grad_contrived1.cml'), checksum=False)
    def test_contrived_spherical_curl2(self):
# testing:
# F(lon, lat, r) = (r sin(lat) cos(lon), -r sin(lon), 0)
# curl( F(x, y, z) ) = (0, 0, -2 cos(lon) cos(lat) )
cube = build_cube(np.empty((70, 150)), spherical=True)
radius = iris.analysis.cartography.DEFAULT_SPHERICAL_EARTH_RADIUS
x = cube.coord('longitude')
y = cube.coord('latitude')
cos_x_pts = np.cos(np.radians(x.points)).reshape(1, x.shape[0])
sin_x_pts = np.sin(np.radians(x.points)).reshape(1, x.shape[0])
cos_y_pts = np.cos(np.radians(y.points)).reshape(y.shape[0], 1)
sin_y_pts = np.sin(np.radians(y.points)).reshape(y.shape[0], 1)
y_ones = np.ones((cube.shape[0], 1))
u = cube.copy(data=sin_y_pts * cos_x_pts * radius)
v = cube.copy(data=-sin_x_pts * y_ones * radius)
u.rename('u_wind')
v.rename('v_wind')
lon_coord = x.copy()
lon_coord.convert_units('radians')
lat_coord = y.copy()
lat_coord.convert_units('radians')
cos_lat_coord = iris.coords.AuxCoord.from_coord(lat_coord)
cos_lat_coord.points = np.cos(lat_coord.points)
cos_lat_coord.units = '1'
cos_lat_coord.rename('cos({})'.format(lat_coord.name()))
r = iris.analysis.calculus.curl(u, v)[2]
x = r.coord('longitude')
y = r.coord('latitude')
cos_x_pts = np.cos(np.radians(x.points)).reshape(1, x.shape[0])
cos_y_pts = np.cos(np.radians(y.points)).reshape(y.shape[0], 1)
result = r.copy(data=2*cos_x_pts*cos_y_pts)
# Note: This numerical comparison was created when the radius was 1000 times smaller
np.testing.assert_array_almost_equal(result.data[30:-30, :], r.data[30:-30, :]/1000.0, decimal=1)
self.assertCML(r, ('analysis', 'calculus', 'grad_contrived2.cml'), checksum=False)
class TestCurlInterface(tests.IrisTest):
def test_non_conformed(self):
u = build_cube(np.empty((50, 20)), spherical=True)
v = u.copy()
y = v.coord('latitude')
y.points += 5
self.assertRaises(ValueError, iris.analysis.calculus.curl, u, v)
def test_standard_name(self):
nx = 20; ny = 50; nz = None;
u = build_cube(np.empty((50, 20)), spherical=True)
v = u.copy()
w = u.copy()
u.rename('u_wind')
v.rename('v_wind')
w.rename('w_wind')
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v)
self.assertEqual(r, (('u', 'v', 'w'), 'wind'))
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w)
self.assertEqual(r, (('u', 'v', 'w'), 'wind'))
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, None, w)
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, None, None, w)
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, None, None, None)
u.rename("x foobar wibble")
v.rename("y foobar wibble")
w.rename("z foobar wibble")
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v)
self.assertEqual(r, (('x', 'y', 'z'), 'foobar wibble'))
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w)
self.assertEqual(r, (('x', 'y', 'z'), 'foobar wibble'))
u.rename("wibble foobar")
v.rename("wobble foobar")
w.rename("tipple foobar")
# r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w) #should raise a Value Error...
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, v)
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, v, w)
u.rename("eastward_foobar")
v.rename("northward_foobar")
w.rename("upward_foobar")
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v)
self.assertEqual(r, (('eastward', 'northward', 'upward'), 'foobar'))
r = iris.analysis.calculus.spatial_vectors_with_phenom_name(u, v, w)
self.assertEqual(r, (('eastward', 'northward', 'upward'), 'foobar'))
# Change it to have an inconsistent phenomenon
v.rename('northward_foobar2')
self.assertRaises(ValueError, iris.analysis.calculus.spatial_vectors_with_phenom_name, u, v)
if __name__ == "__main__":
unittest.main()
| Jozhogg/iris | lib/iris/tests/test_analysis_calculus.py | Python | lgpl-3.0 | 26,634 | 0.003717 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class ItemAttribute(Document):
def validate(self):
self.validate_duplication()
self.validate_attribute_values()
def validate_duplication(self):
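		# Each attribute value and abbreviation may appear only once; abbreviations are compared after upper-casing.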
values, abbrs = [], []
for d in self.item_attribute_values:
d.abbr = d.abbr.upper()
if d.attribute_value in values:
frappe.throw(_("{0} must appear only once").format(d.attribute_value))
values.append(d.attribute_value)
if d.abbr in abbrs:
frappe.throw(_("{0} must appear only once").format(d.abbr))
abbrs.append(d.abbr)
def validate_attribute_values(self):
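		# A value cannot be removed while an existing Item Variant still references it.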
attribute_values = []
for d in self.item_attribute_values:
attribute_values.append(d.attribute_value)
variant_attributes = frappe.db.sql("select DISTINCT attribute_value from `tabVariant Attribute` where attribute=%s", self.name)
if variant_attributes:
for d in variant_attributes:
if d[0] not in attribute_values:
frappe.throw(_("Attribute Value {0} cannot be removed from {1} as Item Variants exist with this Attribute.").format(d[0], self.name))
| treejames/erpnext | erpnext/stock/doctype/item_attribute/item_attribute.py | Python | agpl-3.0 | 1,275 | 0.025882 |
import pytest
import numpy as np
from numpy.testing import assert_allclose
from .....extern.six.moves import range
from ..mle import design_matrix, periodic_fit
@pytest.fixture
def t():
rand = np.random.RandomState(42)
return 10 * rand.rand(10)
@pytest.mark.parametrize('freq', [1.0, 2])
@pytest.mark.parametrize('dy', [None, 2.0])
@pytest.mark.parametrize('bias', [True, False])
def test_design_matrix(t, freq, dy, bias):
X = design_matrix(t, freq, dy, bias=bias)
assert X.shape == (t.shape[0], 2 + bool(bias))
if bias:
assert_allclose(X[:, 0], 1. / (dy or 1.0))
assert_allclose(X[:, -2], np.sin(2 * np.pi * freq * t) / (dy or 1.0))
assert_allclose(X[:, -1], np.cos(2 * np.pi * freq * t) / (dy or 1.0))
@pytest.mark.parametrize('nterms', range(4))
def test_multiterm_design_matrix(t, nterms):
dy = 2.0
freq = 1.5
X = design_matrix(t, freq, dy=dy, bias=True, nterms=nterms)
assert X.shape == (t.shape[0], 1 + 2 * nterms)
assert_allclose(X[:, 0], 1. / dy)
for i in range(1, nterms + 1):
assert_allclose(X[:, 2 * i - 1], np.sin(2 * np.pi * i * freq * t) / dy)
assert_allclose(X[:, 2 * i], np.cos(2 * np.pi * i * freq * t) / dy)
@pytest.mark.parametrize('nterms', range(1, 4))
@pytest.mark.parametrize('freq', [1, 2])
@pytest.mark.parametrize('fit_mean', [True, False])
def test_exact_mle_fit(nterms, freq, fit_mean):
rand = np.random.RandomState(42)
t = 10 * rand.rand(30)
theta = -1 + rand.rand(2 * nterms + 1)
y = np.zeros(t.shape)
if fit_mean:
y = theta[0] * np.ones(t.shape)
for i in range(1, nterms + 1):
y += theta[2 * i - 1] * np.sin(2 * np.pi * i * freq * t)
y += theta[2 * i] * np.cos(2 * np.pi * i * freq * t)
y_fit = periodic_fit(t, y, dy=1, frequency=freq, t_fit=t, nterms=nterms,
center_data=False, fit_mean=fit_mean)
assert_allclose(y, y_fit)
| kelle/astropy | astropy/stats/lombscargle/implementations/tests/test_mle.py | Python | bsd-3-clause | 1,921 | 0.000521 |
"""
See peoplegui--old.py: the alternative here uses nested row frames with fixed-
width labels with pack() to achieve the same aligned layout as grid(), but it
takes two extra lines of code as is (though adding window resize support makes
the two techniques roughly the same--see later in the book).
"""
from tkinter import *
from tkinter.messagebox import showerror
import shelve
shelvename = 'class-shelve'
fieldnames = ('name', 'age', 'job', 'pay')
def makeWidgets():
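    # One row Frame per field: a fixed-width Label packed on the left and an Entry on the right, so the columns stay aligned without grid().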
global entries
window = Tk()
window.title('People Shelve')
form = Frame(window)
form.pack()
entries = {}
for label in ('key',) + fieldnames:
row = Frame(form)
lab = Label(row, text=label, width=6)
ent = Entry(row)
row.pack(side=TOP)
lab.pack(side=LEFT)
ent.pack(side=RIGHT)
entries[label] = ent
Button(window, text="Fetch", command=fetchRecord).pack(side=LEFT)
Button(window, text="Update", command=updateRecord).pack(side=LEFT)
Button(window, text="Quit", command=window.quit).pack(side=RIGHT)
return window
def fetchRecord():
key = entries['key'].get()
try:
record = db[key] # fetch by key, show in GUI
except:
showerror(title='Error', message='No such key!')
else:
for field in fieldnames:
entries[field].delete(0, END)
entries[field].insert(0, repr(getattr(record, field)))
def updateRecord():
key = entries['key'].get()
if key in db:
record = db[key] # update existing record
else:
from person import Person # make/store new one for key
record = Person(name='?', age='?') # eval: strings must be quoted
for field in fieldnames:
setattr(record, field, eval(entries[field].get()))
db[key] = record
db = shelve.open(shelvename)
window = makeWidgets()
window.mainloop()
db.close() # back here after quit or window close
| simontakite/sysadmin | pythonscripts/programmingpython/Preview/peoplegui--frame.py | Python | gpl-2.0 | 2,035 | 0.00344 |
from .map_to_surface import \
MapToSurface
from .mapping_task import \
MappingTask
from .mask_task import \
MaskTask
from .move_task import \
MoveTask
from .rotate_copy import \
RotateCopy
| simvisage/oricreate | oricreate/mapping_tasks/__init__.py | Python | gpl-3.0 | 210 | 0 |
import logging
from .data import DefinedTable
logger = logging.getLogger(__name__)
def ensure_tables():
"""When called, ensure that all the tables that we need are created in the
database. The real work is supplied by the DefinedTable base class
"""
for tab in [Subject, ExpCondition]:
logger.debug("Creating table %s", tab.get_table_name())
tab.ensure_table()
class Subject(DefinedTable):
"""An experimental subject that we are tracking in an experimental condition
"""
@classmethod
    def get_table_name(cls):
return "Subjects"
@classmethod
    def get_key_name(cls):
return "subject_id"
def __init__(
self,
subject_id=None,
first_name=None,
last_name=None,
email=None,
exp_condition=None
):
self.subject_id = subject_id
self.first_name = first_name
self.last_name = last_name
self.email = email
self.exp_condition = exp_condition
def errors(self):
if not self.subject_id:
yield "Missing subject ID"
if not self.exp_condition:
yield "Missing Experimental Condition"
class ExpCondition(DefinedTable):
"""A single experimental condition that any number of subjects may be a part of
"""
@classmethod
    def get_table_name(cls):
return "Conditions"
@classmethod
    def get_key_name(cls):
return "condition_id"
def __init__(
self,
condition_id=None,
condition_name=None,
description=None
):
self.condition_id = condition_id
self.condition_name = condition_name
self.description = description
| memphis-iis/demo-track | demotrack/model.py | Python | apache-2.0 | 1,712 | 0.001168 |
import FWCore.ParameterSet.Config as cms
from HeavyIonsAnalysis.JetAnalysis.jets.akPu3CaloJetSequence_PbPb_mc_cff import *
#PU jets: type 15
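# Clone the standard akPu3Calo matching, parton, correction and patJets modules so they run on the "akPu3CaloJets15" collection.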
akPu3Calomatch15 = akPu3Calomatch.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3Caloparton15 = akPu3Caloparton.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3Calocorr15 = akPu3Calocorr.clone(src = cms.InputTag("akPu3CaloJets15"))
akPu3CalopatJets15 = akPu3CalopatJets.clone(jetSource = cms.InputTag("akPu3CaloJets15"),
jetCorrFactorsSource = cms.VInputTag(cms.InputTag("akPu3Calocorr15")),
genJetMatch = cms.InputTag("akPu3Calomatch15"),
genPartonMatch = cms.InputTag("akPu3Caloparton15"),
)
akPu3CaloJetAnalyzer15 = akPu3CaloJetAnalyzer.clone(jetTag = cms.InputTag("akPu3CalopatJets15"), doSubEvent = cms.untracked.bool(True) )
akPu3CaloJetSequence15 = cms.Sequence(akPu3Calomatch15
*
akPu3Caloparton15
*
akPu3Calocorr15
*
akPu3CalopatJets15
*
akPu3CaloJetAnalyzer15
)
| mverwe/JetRecoValidation | PuThresholdTuning/python/akPu3CaloJetSequence15_cff.py | Python | cc0-1.0 | 1,328 | 0.03012 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_dressed_liberation_patriot_rodian_female_01.iff"
result.attribute_template_id = 9
result.stfName("npc_name","rodian_base_female")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/mobile/shared_dressed_liberation_patriot_rodian_female_01.py | Python | mit | 471 | 0.046709 |