code | repo_name | path | language | license | size
---|---|---|---|---|---|
"""
This module exposes Home Assistant via Zeroconf.
Zeroconf is also known as Bonjour, Avahi or Multicast DNS (mDNS).
For more details about Zeroconf, please refer to the documentation at
https://home-assistant.io/components/zeroconf/
"""
import logging
import socket
from homeassistant.const import (EVENT_HOMEASSISTANT_STOP, __version__)
REQUIREMENTS = ["zeroconf==0.17.5"]
DEPENDENCIES = ["api"]
_LOGGER = logging.getLogger(__name__)
DOMAIN = "zeroconf"
ZEROCONF_TYPE = "_home-assistant._tcp.local."
def setup(hass, config):
"""Set up Zeroconf and make Home Assistant discoverable."""
from zeroconf import Zeroconf, ServiceInfo
zeroconf = Zeroconf()
zeroconf_name = "{}.{}".format(hass.config.location_name,
ZEROCONF_TYPE)
requires_api_password = (hass.config.api.api_password is not None)
params = {"version": __version__, "base_url": hass.config.api.base_url,
"requires_api_password": requires_api_password}
info = ServiceInfo(ZEROCONF_TYPE, zeroconf_name,
socket.inet_aton(hass.config.api.host),
hass.config.api.port, 0, 0, params)
zeroconf.register_service(info)
def stop_zeroconf(event):
"""Stop Zeroconf."""
zeroconf.unregister_service(info)
zeroconf.close()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_zeroconf)
return True
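# Illustrative sketch, not part of the upstream component: one way a client on
# the same network could discover the instance registered above, using the same
# zeroconf library. The listener class name and timeout are only examples.
def _example_discover(timeout=5):
    """Browse for Home Assistant instances announced over mDNS."""
    import time
    from zeroconf import Zeroconf, ServiceBrowser
    found = []
    class _Listener(object):
        """Collect the ServiceInfo of every matching announcement."""
        def add_service(self, zc, service_type, name):
            found.append(zc.get_service_info(service_type, name))
        def remove_service(self, zc, service_type, name):
            pass
    browser = Zeroconf()
    ServiceBrowser(browser, ZEROCONF_TYPE, _Listener())
    time.sleep(timeout)
    browser.close()
    return found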
| mikaelboman/home-assistant | homeassistant/components/zeroconf.py | Python | mit | 1,424 |
# Quick test file to open a TSYS01 connection and read its temperature
# 20160819 IMD
#
#
import TSYS01
sensor=TSYS01.TSYS01(0x77)
print 'Temperature = %3.3f C' % sensor.readTemp()
| kiran4399/beagleboat | src/sensors/tsys01/readTemp.py | Python | mit | 175 |
"""The tests for the Ring component."""
import copy
import os
import unittest
import requests_mock
from homeassistant import setup
import homeassistant.components.ring as ring
from tests.common import (
get_test_config_dir, get_test_home_assistant, load_fixture)
ATTRIBUTION = 'Data provided by Ring.com'
VALID_CONFIG = {
"ring": {
"username": "foo",
"password": "bar",
}
}
class TestRing(unittest.TestCase):
"""Tests the Ring component."""
def cleanup(self):
"""Cleanup any data created from the tests."""
if os.path.isfile(self.cache):
os.remove(self.cache)
def setUp(self):
"""Initialize values for this test case class."""
self.hass = get_test_home_assistant()
self.cache = get_test_config_dir(ring.DEFAULT_CACHEDB)
self.config = VALID_CONFIG
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
self.cleanup()
@requests_mock.Mocker()
def test_setup(self, mock):
"""Test the setup."""
mock.post('https://api.ring.com/clients_api/session',
text=load_fixture('ring_session.json'))
response = ring.setup(self.hass, self.config)
self.assertTrue(response)
@requests_mock.Mocker()
def test_setup_component_no_login(self, mock):
"""Test the setup when no login is configured."""
mock.post('https://api.ring.com/clients_api/session',
text=load_fixture('ring_session.json'))
        # Deep copy so the nested VALID_CONFIG dict is not mutated across tests
        conf = copy.deepcopy(self.config)
del conf['ring']['username']
assert not setup.setup_component(self.hass, ring.DOMAIN, conf)
@requests_mock.Mocker()
def test_setup_component_no_pwd(self, mock):
"""Test the setup when no password is configured."""
mock.post('https://api.ring.com/clients_api/session',
text=load_fixture('ring_session.json'))
        # Deep copy so the nested VALID_CONFIG dict is not mutated across tests
        conf = copy.deepcopy(self.config)
del conf['ring']['password']
assert not setup.setup_component(self.hass, ring.DOMAIN, conf)
| JshWright/home-assistant | tests/components/test_ring.py | Python | apache-2.0 | 2,096 |
#!/bin/env dls-python
# This script comes from the dls_scripts python module
"""
Check or set the contact information for a module on the repository. By default
the contacts of the module are printed in a basic format. The CSV flag will
print the contacts in CSV format. To set a contact and/or cc, the contact and
cc flags can be used. To set a contact/cc for one or many modules the import
flag can be used with a valid CSV file. If one contact is changed the other
will be left as it was.
"""
import os
import sys
import shutil
import json
import logging
import csv
import ldap
from dls_ade.argument_parser import ArgParser
from dls_ade import Server
from dls_ade.exceptions import FedIdError
from dls_ade import logconfig
from dls_ade.constants import LDAP_SERVER_URL
# Optional but useful in a library or non-main module:
logging.getLogger(__name__).addHandler(logging.NullHandler())
log = logging.getLogger(__name__)
usermsg = logging.getLogger(name="usermessages")
usage = """
Default <area> is 'support'.
Set or get primary contact (contact) and secondary contact (cc) properties
for <modules> (can just give one module)
e.g.
%prog ip autosave calc
# View the contacts for the ip, autosave and calc modules in support area
%prog -s
# View all the module contacts and ccs in the support area in csv format
%prog -c tmc43 -d jr76 -p pysvn
# Set the python module pysvn to have contact tmc43 and cc jr76
%prog -m /tmp/module_contacts_backup.csv
# Import the module contact and cc from /tmp/module_contacts_backup.csv
# and set them in svn. The csv file must be in the same format as produced
# by the -s command, but any specified contact and cc names are ignored,
# only fed-ids are used.
"""
def make_parser():
"""
Takes ArgParse instance with default arguments and adds
Positional Arguments:
* modules
Flags:
* -c (contact)
* -d (cc)
* -s (csv)
* -m (import)
Returns:
:class:`argparse.ArgumentParser`: ArgParse instance
"""
parser = ArgParser(usage)
# nargs='*' makes <modules> an optional positional argument; a list of N
# entries
parser.add_argument(
"modules", nargs='*', type=str, default=None,
help="Name(s) of module(s) to list/set contacts for")
parser.add_argument(
"-c", "--contact", action="store", type=str, metavar="FED_ID",
dest="contact", help="Set the contact property to FED_ID")
parser.add_argument(
"-d", "--cc", action="store", type=str, metavar="FED_ID", dest="cc",
help="Set the cc property to FED_ID")
parser.add_argument(
"-s", "--csv", action="store_true", dest="csv",
help="Print output as csv file")
parser.add_argument(
"-m", "--import", action="store", type=str, metavar="CSV_FILE",
dest="imp", help="Import a CSV_FILE with header and rows of format:" +
"\nModule, Contact, Contact Name, CC, CC Name")
return parser
def check_parsed_args_compatible(imp, modules, contact, cc, parser):
"""
Check that the combination of arguments are compatible.
Args:
imp(str): CSV import specifier
modules(str): Modules argument
contact(str): Contact argument
cc(str): CC argument
parser(:class:`argparse.ArgumentParser`): Parser instance
Raises:
:class:`argparse.ArgumentParser` error:
* --import cannot be used with --contact or --cc
* You cannot set all modules in an area to one contact/cc, enter a
specific module.
"""
if imp and (contact or cc):
parser.error("--import cannot be used with --contact or --cc")
# Stop user from setting all modules in an area to one contact/cc
if not modules and (contact or cc):
parser.error("You cannot set all modules in an area to one contact/cc,"
" enter a specific module.")
# Just in case parser.error doesn't stop the script
return 1
def lookup_contact_name(fed_id):
"""
Perform an LDAP search to find the Name and Surname corresponding to a
FED-ID.
Args:
fed_id(str): FED-ID to search for
Returns:
str: Contact name
Raises: FedIdError if the fed_id cannot be found in LDAP
"""
# Set up ldap search parameters
l = ldap.initialize(LDAP_SERVER_URL)
basedn = "dc=fed,dc=cclrc,dc=ac,dc=uk"
search_filter = "(&(cn={}))".format(fed_id)
search_attribute = ["givenName", "sn"]
search_scope = ldap.SCOPE_SUBTREE
# Perform search, print message so user knows where program hangs
# The lookup can hang at l.result() if the FED-ID does not exist.
log.debug("Performing search for {}".format(fed_id))
l.simple_bind_s()
ldap_result_id = l.search(basedn, search_scope, search_filter,
search_attribute)
ldap_output = l.result(ldap_result_id, 0)
log.debug(ldap_output)
# ldap_output has the form:
# (100, [('CN=<FED-ID>,OU=DLS,DC=fed,DC=cclrc,DC=ac,DC=uk',
# {'givenName': ['<FirstName>'], 'sn': ['<Surname>']})])
if ldap_output[0] == 115:
# If the FED-ID does not exist, ldap_output will look like:
# (115, [(None,
# ['ldap://res02.fed.cclrc.ac.uk/DC=res02,DC=fed,DC=cclrc,DC=ac,DC=uk'])])
raise FedIdError("\"{}\" is not a FedID in LDAP".format(fed_id))
# Extract contact name from output
name_info_dict = ldap_output[1][0][1]
# name_info_dict: {'givenName': ['<FirstName>'], 'sn': ['<Surname>']}
contact_name = \
name_info_dict['givenName'][0].decode('utf-8') + ' ' + name_info_dict['sn'][0].decode('utf-8')
return contact_name
def output_csv_format(contact, cc_contact, module):
"""
Format contact info string in CSV format.
Args:
contact(str): Contact FED-ID
        cc_contact(str): CC FED-ID
module(str): Module name
Returns:
str: Formatted output
"""
# Check if <FED-ID>s are specified in repo, if not don't run lookup
# function
if contact != 'unspecified':
try:
contact_name = lookup_contact_name(contact)
except FedIdError as exception:
log.error(exception.message)
contact_name = contact
else:
contact_name = contact
if cc_contact != 'unspecified':
try:
cc_name = lookup_contact_name(cc_contact)
except FedIdError as exception:
log.error(exception.message)
            cc_name = cc_contact
else:
cc_name = cc_contact
output = "{module},{contact},{contact_name},{cc},{cc_name}".format(
module=module, contact=contact, contact_name=contact_name,
cc=cc_contact, cc_name=cc_name)
return output
def import_from_csv(modules, area, imp):
"""
Extract contact info from a given CSV file.
Args:
modules(list): List of valid modules
area(str): Area of modules that are having contacts changed
imp(str): File path for CSV to get info from
Returns:
A list of tuples containing module, contact and cc
"""
reader = csv.reader(open(imp, "r"))
# Extract data from reader object
csv_file = []
for row in reader:
csv_file.append(row)
log.debug(csv_file)
if not csv_file:
raise Exception("CSV file is empty")
contacts = []
for row in csv_file:
# Check for header row and skip
if row[0] != "Module":
# CSV file format should be: Module,Contact,Contact Name,CC,CC Name
if len(row) > 1:
module = row[0].strip()
contact = row[1].strip()
else:
raise Exception("Module {} has no corresponding contact in"
" CSV file".format(row[0]))
if len(row) > 3:
cc = row[3].strip()
else:
cc = ""
if module not in modules:
raise Exception("Module {module} not in {area} area".format(
module=module, area=area))
if module in [x[0] for x in contacts]:
raise Exception("Module {} defined twice in"
" CSV file".format(module))
contacts.append((module, contact, cc))
return contacts
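# Example of a CSV file accepted above (illustrative values; as the usage text
# notes, the two name columns are ignored and only the FED-IDs are used):
#
#   Module,Contact,Contact Name,CC,CC Name
#   ip,tmc43,First Last,jr76,First Last
#   calc,tmc43,First Last,,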
def edit_contact_info(repo, contact='', cc=''):
"""
Write to .gitattributes file to change contacts of repo.
Args:
repo(:class:`~git.repo.base.Repo`): Repository instance of module
contact(str): Contact FED-ID
cc(str): CC FED-ID
Returns:
str: Commit message summarising changes made
"""
current_contact = repo.git.check_attr("module-contact", ".").split(' ')[-1]
current_cc = repo.git.check_attr("module-cc", ".").split(' ')[-1]
if contact in [current_contact, ''] and cc in [current_cc, '']:
usermsg.info("Leaving contacts unchanged ({contact}, {cc})".format(contact=current_contact, cc=current_cc))
return
# Check that FED-IDs exist,
    # if they don't, lookup_contact_name() will (possibly) hang and raise an exception
if contact:
contact = contact.strip()
lookup_contact_name(contact)
else:
contact = current_contact
if cc:
cc = cc.strip()
lookup_contact_name(cc)
else:
cc = current_cc
module = repo.working_tree_dir.split('/')[-1]
with open(os.path.join(
repo.working_tree_dir, '.gitattributes'), 'w') as git_attr_file:
commit_message = ''
user_msg = "{}: ".format(module)
if contact:
user_msg += "Setting contact to {}".format(contact)
commit_message += "Set contact to {}. ".format(contact)
git_attr_file.write("* module-contact={}\n".format(contact))
if cc:
user_msg += " Setting cc to {}".format(cc)
commit_message += "Set cc to {}.".format(cc)
git_attr_file.write("* module-cc={}\n".format(cc))
usermsg.info(user_msg)
return commit_message
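# The .gitattributes file written above ends up containing lines such as the
# following (FED-IDs are illustrative, taken from the usage examples on top):
#
#   * module-contact=tmc43
#   * module-cc=jr76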
def _main():
parser = make_parser()
args = parser.parse_args()
log.info(json.dumps({'CLI': sys.argv, 'options_args': vars(args)}))
check_parsed_args_compatible(args.imp, args.modules, args.contact, args.cc,
parser)
server = Server()
# Create the list of modules from args, or the server if none provided
modules = []
if args.modules:
for module in args.modules:
modules.append(module)
else:
repo_list = server.get_server_repo_list(args.area)
for path in repo_list:
modules.append(path.split(args.area + "/")[-1])
# If no contacts or csv file provided to edit, use default script
# operation: print contacts
if not (args.contact or args.cc or args.imp):
print_out = []
for module in modules:
source = server.dev_module_path(module, args.area)
try:
vcs = server.temp_clone(source)
except ValueError:
log.error("Module {} does not exist in {} [{}]".format(
module, args.area, source))
continue
# Retrieve contact info
contact = vcs.repo.git.check_attr(
"module-contact", ".").split(' ')[-1]
cc_contact = vcs.repo.git.check_attr(
"module-cc", ".").split(' ')[-1]
if args.csv:
print_out.append(output_csv_format(contact, cc_contact, module))
else:
print_out.append("{module} Contact: {contact}, CC: {cc}"
.format(cc=cc_contact, contact=contact, module=module))
shutil.rmtree(vcs.repo.working_tree_dir)
module_contacts_str = ""
if args.csv:
module_contacts_str += "Module,Contact,Contact Name,CC,CC Name\n"
module_contacts_str += "\n".join(print_out)
usermsg.info(module_contacts_str) # print the list of module owners
return
# If we get to this point, we are assigning contacts
if args.imp:
contacts = import_from_csv(modules, args.area, args.imp)
else:
# If no csv file provided, retrieve contacts from args
contacts = []
for module in modules:
contacts.append((module, args.contact, args.cc))
# Checkout modules and change contacts
for module, contact, cc in contacts:
log.debug("Cloning {module} from {area}".format(module=module, area=args.area))
source = server.dev_module_path(module, args.area)
vcs = server.temp_clone(source)
repo = vcs.repo
try:
edit_summary = edit_contact_info(repo, contact, cc,)
except FedIdError as exception:
usermsg.error("ABORTING: {}".format(exception.message))
sys.exit(1)
if edit_summary is not None:
index = repo.index
index.add(['.gitattributes'])
index.commit(edit_summary)
origin = repo.remotes.origin
log.debug("Pushing module contact/cc attributes to remote on \'{}\'".format(repo.active_branch))
origin.push(repo.active_branch)
shutil.rmtree(repo.working_tree_dir)
def main():
# Catch unhandled exceptions and ensure they're logged
try:
logconfig.setup_logging(application='dls-module-contacts.py')
_main()
except Exception as e:
logging.exception(e)
logging.getLogger("usermessages").exception("ABORT: Unhandled exception (see trace below): {}".format(e))
exit(1)
if __name__ == "__main__":
main()
| dls-controls/dls_ade | dls_ade/dls_module_contacts.py | Python | apache-2.0 | 13,715 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Downloads the FITS files that are used in image testing and for building documentation.
"""
import time
from urllib.error import URLError  # raised by download_file on network errors
from ....utils.data import download_file
from ....io import fits
__all__ = ['fetch_msx_hdu',
'fetch_rosat_hdu',
'fetch_twoMASS_k_hdu',
'fetch_l1448_co_hdu',
'fetch_bolocam_hdu',
]
MAX_RETRIES = 10
TIME_BETWEEN_RETRIES = 5
URL = 'http://data.astropy.org/'
def fetch_hdu(filename, cache=True):
"""Download a FITS file to the cache and open HDU 0.
"""
for retry in range(MAX_RETRIES):
try:
path = download_file(URL + filename, cache=cache, timeout=30)
except URLError:
if retry == MAX_RETRIES - 1:
raise
else:
time.sleep(TIME_BETWEEN_RETRIES)
else:
break
else:
raise Exception("Failed to download file {0}".format(filename))
return fits.open(path)[0]
def fetch_msx_hdu(cache=True):
"""Fetch the MSX example dataset HDU.
Returns
-------
hdu : `~astropy.io.fits.ImageHDU`
Image HDU
"""
return fetch_hdu('galactic_center/gc_msx_e.fits', cache=cache)
def fetch_rosat_hdu(cache=True):
return fetch_hdu('allsky/allsky_rosat.fits', cache=cache)
def fetch_twoMASS_k_hdu(cache=True):
return fetch_hdu('galactic_center/gc_2mass_k.fits', cache=cache)
def fetch_l1448_co_hdu(cache=True):
return fetch_hdu('l1448/l1448_13co.fits', cache=cache)
def fetch_bolocam_hdu(cache=True):
return fetch_hdu('galactic_center/gc_bolocam_gps.fits', cache=cache)
| joergdietrich/astropy | astropy/visualization/wcsaxes/tests/datasets.py | Python | bsd-3-clause | 1,655 |
# -*- coding: utf8 -*-
from phystricks import *
def WQVZooAhkdlegv():
pspict,fig = SinglePicture("WQVZooAhkdlegv")
pspict.dilatation(1)
A=Point(0,0)
B=Point(5,0)
O=Segment(A,B).midpoint()+(0,3)
trig=Polygon(A,B,O)
aA=AngleAOB(B,A,O)
aA.parameters.color="red"
aA.put_arrow(pspict=pspict)
aO=AngleAOB(A,O,B)
aO.parameters.color="red"
aO.put_arrow(pspict=pspict)
aB=AngleAOB(O,B,A)
aB.parameters.color="red"
aB.put_arrow(pspict=pspict)
pspict.DrawGraphs(trig,aA,aO,aB)
pspict.comment="The angles are red and an arrow is drawn in the trigonometric sense."
fig.no_figure()
fig.conclude()
fig.write_the_file()
| LaurentClaessens/phystricks | testing/demonstration/phystricksWQVZooAhkdlegv.py | Python | gpl-3.0 | 692 |
# -*- encoding: utf-8 -*-
from abjad import *
def test_scoretools_NoteHead_is_cautionary_01():
note_head = scoretools.NoteHead(written_pitch="c'")
assert note_head.is_cautionary is None
note_head.is_cautionary = True
assert note_head.is_cautionary == True
note_head.is_cautionary = False
assert note_head.is_cautionary == False | mscuthbert/abjad | abjad/tools/scoretools/test/test_scoretools_NoteHead_is_cautionary.py | Python | gpl-3.0 | 354 |
"""Support for Tuya switches."""
from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchDevice
from . import DATA_TUYA, TuyaDevice
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Tuya Switch device."""
if discovery_info is None:
return
tuya = hass.data[DATA_TUYA]
dev_ids = discovery_info.get("dev_ids")
devices = []
for dev_id in dev_ids:
device = tuya.get_device_by_id(dev_id)
if device is None:
continue
devices.append(TuyaSwitch(device))
add_entities(devices)
class TuyaSwitch(TuyaDevice, SwitchDevice):
"""Tuya Switch Device."""
def __init__(self, tuya):
"""Init Tuya switch device."""
super().__init__(tuya)
self.entity_id = ENTITY_ID_FORMAT.format(tuya.object_id())
@property
def is_on(self):
"""Return true if switch is on."""
return self.tuya.state()
def turn_on(self, **kwargs):
"""Turn the switch on."""
self.tuya.turn_on()
def turn_off(self, **kwargs):
"""Turn the device off."""
self.tuya.turn_off()
| fbradyirl/home-assistant | homeassistant/components/tuya/switch.py | Python | apache-2.0 | 1,134 |
#!/usr/bin/python
import socket
import cPickle
import os
import sys
import signal
PORT = 54321
def handle(cs, addr):
print "Conn from", addr
cs.sendall("HAI\n")
try:
l = cPickle.loads(cs.recv(1024))
s = sum(l)
cs.sendall("%d\n" % s)
except:
cs.sendall("fail :(\n")
cs.sendall("bye\n")
cs.close()
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(("0.0.0.0", PORT))
s.listen(100)
while 1:
(cs, addr) = s.accept()
pid = os.fork()
if pid == 0:
s.close()
handle(cs, addr)
sys.exit(0)
cs.close()
| godoppl/project | sts/amateria/level0/level1.py | Python | gpl-3.0 | 584 |
import os
import numpy as np
def initialize_pyrngs():
from gslrandom import PyRNG, get_omp_num_threads
if "OMP_NUM_THREADS" in os.environ:
num_threads = os.environ["OMP_NUM_THREADS"]
else:
num_threads = get_omp_num_threads()
assert num_threads > 0
# Choose random seeds
seeds = np.random.randint(2**16, size=num_threads)
return [PyRNG(seed) for seed in seeds]
def convert_discrete_to_continuous(S, dt):
# Convert S to continuous time
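    # S is a (num_bins x K) matrix of event counts per time bin; each count is
    # expanded into that many individual events, jittered uniformly inside its
    # bin, and C_ct records which of the K processes each event came from.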
from pybasicbayes.util.general import ibincount
T = S.shape[0] * dt
S_ct = dt * np.concatenate([ibincount(Sk) for Sk in S.T]).astype(float)
S_ct += dt * np.random.rand(*S_ct.shape)
assert np.all(S_ct < T)
C_ct = np.concatenate([k*np.ones(Sk.sum()) for k,Sk in enumerate(S.T)]).astype(int)
# Sort the data
perm = np.argsort(S_ct)
S_ct = S_ct[perm]
C_ct = C_ct[perm]
return S_ct, C_ct, T
def get_unique_file_name(filedir, filename):
"""
    Get a unique filename by appending .x to the filename, where x
is the next untaken number
"""
import fnmatch
# Get the number of conflicting log files
fnames = os.listdir(filedir)
conflicts = fnmatch.filter(fnames, "%s*" % filename)
nconflicts = len(conflicts)
if nconflicts > 0:
unique_name = "%s.%d" % (filename, nconflicts+1)
else:
unique_name = filename
return unique_name
def logistic(x,lam_max=1.0):
return lam_max*1.0/(1.0+np.exp(-x))
def logit(x,lam_max=1.0):
return np.log(x/lam_max)-np.log(1-(x/lam_max))
def sample_nig(mu0, lmbda0, alpha0, beta0):
mu0, lmbda0, alpha0, beta0 = np.broadcast_arrays(mu0, lmbda0, alpha0, beta0)
shp = mu0.shape
assert lmbda0.shape == alpha0.shape == beta0.shape == shp
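    # Conjugate Normal-Gamma draw:
    #   tau ~ Gamma(alpha0, rate=beta0)   (numpy's gamma takes scale = 1/beta0)
    #   mu  ~ Normal(mu0, variance = 1/(lmbda0 * tau))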
tau = np.array(np.random.gamma(alpha0, 1./beta0)).reshape(shp)
mu = np.array(np.random.normal(mu0, np.sqrt(1./(lmbda0 * tau)))).reshape(shp)
return mu, tau | mattjj/pyhawkes | pyhawkes/utils/utils.py | Python | mit | 1,947 |
#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Start-up script for ESP.
# Configures nginx and fetches service configuration.
#
# Exit codes:
# 1 - failed to fetch,
# 2 - validation error,
# 3 - IO error,
# 4 - argument parsing error,
# in addition to NGINX error codes.
import argparse
import collections
import fetch_service_config as fetch
import json
import logging
import os
import re
import sys
import textwrap
import uuid
from collections import Counter
from mako.template import Template
# Location of NGINX binary
NGINX = "/usr/sbin/nginx"
# Location of NGINX template
NGINX_CONF_TEMPLATE = "/etc/nginx/nginx-auto.conf.template"
SERVER_CONF_TEMPLATE = "/etc/nginx/server-auto.conf.template"
# Custom nginx config used by customers are hardcoded to this path
SERVER_CONF = "/etc/nginx/server_config.pb.txt"
# Location of generated config files
CONFIG_DIR = "/etc/nginx/endpoints"
# Protocol prefixes
GRPC_PREFIX = "grpc://"
HTTP_PREFIX = "http://"
HTTPS_PREFIX = "https://"
# Metadata service
METADATA_ADDRESS = "http://169.254.169.254"
# Management service
MANAGEMENT_ADDRESS = "https://servicemanagement.googleapis.com"
# Service management service
SERVICE_MGMT_URL_TEMPLATE = ("{}/v1/services/{}/config?configId={}")
# DNS resolver
DNS_RESOLVER = "8.8.8.8"
# Default HTTP/1.x port
DEFAULT_PORT = 8080
# Default status port
DEFAULT_STATUS_PORT = 8090
# Default backend
DEFAULT_BACKEND = "127.0.0.1:8081"
# Default rollout_strategy
DEFAULT_ROLLOUT_STRATEGY = "fixed"
# Default xff_trusted_proxy_list
DEFAULT_XFF_TRUSTED_PROXY_LIST = "0.0.0.0/0, 0::/0"
# Default PID file location (for nginx as a daemon)
DEFAULT_PID_FILE = "/var/run/nginx.pid"
# Default nginx worker_processes
DEFAULT_WORKER_PROCESSES = "1"
# Google default application credentials environment variable
GOOGLE_CREDS_KEY = "GOOGLE_APPLICATION_CREDENTIALS"
Port = collections.namedtuple('Port',
['port', 'proto'])
Location = collections.namedtuple('Location',
['path', 'backends', 'proto'])
Ingress = collections.namedtuple('Ingress',
['ports', 'host', 'locations'])
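# Illustrative example (not used at runtime) of how these records compose into
# the structure that make_ingress() below builds for the nginx template; the
# addresses and ports are placeholders:
#
#   Ingress(ports=[Port(8080, "http"), Port(443, "ssl")],
#           host='""',
#           locations=[Location(path="/",
#                               backends=["127.0.0.1:8081"],
#                               proto="http")])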
def write_pid_file(args):
try:
f = open(args.pid_file, 'w+')
f.write(str(os.getpid()))
f.close()
except IOError as err:
logging.error("Failed to save PID file: " + args.pid_file)
logging.error(err.strerror)
sys.exit(3)
def write_template(ingress, nginx_conf, args):
# Load template
try:
template = Template(filename=args.template)
except IOError as err:
logging.error("Failed to load NGINX config template. " + err.strerror)
sys.exit(3)
conf = template.render(
ingress=ingress,
pid_file=args.pid_file,
status=args.status_port,
service_account=args.service_account_key,
metadata=args.metadata,
resolver=args.dns,
access_log=args.access_log,
healthz=args.healthz,
xff_trusted_proxies=args.xff_trusted_proxies,
tls_mutual_auth=args.tls_mutual_auth,
underscores_in_headers=args.underscores_in_headers,
allow_invalid_headers=args.allow_invalid_headers,
enable_websocket=args.enable_websocket,
client_max_body_size=args.client_max_body_size,
client_body_buffer_size=args.client_body_buffer_size,
worker_processes=args.worker_processes,
cors_preset=args.cors_preset,
cors_allow_origin=args.cors_allow_origin,
cors_allow_origin_regex=args.cors_allow_origin_regex,
cors_allow_methods=args.cors_allow_methods,
cors_allow_headers=args.cors_allow_headers,
cors_allow_credentials=args.cors_allow_credentials,
cors_expose_headers=args.cors_expose_headers,
ssl_protocols=args.ssl_protocols,
google_cloud_platform=(args.non_gcp==False))
# Save nginx conf
try:
f = open(nginx_conf, 'w+')
f.write(conf)
f.close()
except IOError as err:
logging.error("Failed to save NGINX config." + err.strerror)
sys.exit(3)
def write_server_config_template(server_config, args):
# Load template
try:
template = Template(filename=args.server_config_template)
except IOError as err:
logging.error("Failed to load server config template. " + err.strerror)
sys.exit(3)
conf = template.render(
service_configs=args.service_configs,
management=args.management,
service_control_url_override=args.service_control_url_override,
rollout_id=args.rollout_id,
rollout_strategy=args.rollout_strategy,
always_print_primitive_fields=args.transcoding_always_print_primitive_fields,
client_ip_header=args.client_ip_header,
client_ip_position=args.client_ip_position,
rewrite_rules=args.rewrite,
disable_cloud_trace_auto_sampling=args.disable_cloud_trace_auto_sampling,
cloud_trace_url_override=args.cloud_trace_url_override)
# Save nginx conf
try:
f = open(server_config, 'w+')
f.write(conf)
f.close()
except IOError as err:
logging.error("Failed to save server config." + err.strerror)
sys.exit(3)
def ensure(config_dir):
if not os.path.exists(config_dir):
try:
os.makedirs(config_dir)
except OSError as exc:
logging.error("Cannot create config directory.")
sys.exit(3)
def assert_file_exists(fl):
if not os.path.exists(fl):
logging.error("Cannot find the specified file " + fl)
sys.exit(3)
def start_nginx(nginx, nginx_conf):
try:
# Control is relinquished to nginx process after this line
os.execv(nginx, ['nginx', '-p', '/usr', '-c', nginx_conf])
except OSError as err:
logging.error("Failed to launch NGINX: " + nginx)
logging.error(err.strerror)
sys.exit(3)
def fetch_and_save_service_config_url(args, token, service_mgmt_url, filename):
try:
# download service config
config = fetch.fetch_service_json(service_mgmt_url, token)
# Save service json for ESP
service_config = args.config_dir + "/" + filename
try:
f = open(service_config, 'w+')
json.dump(config, f, sort_keys=True, indent=2,
separators=(',', ': '))
f.close()
except IOError as err:
logging.error("Cannot save service config." + err.strerror)
sys.exit(3)
except fetch.FetchError as err:
logging.error(err.message)
sys.exit(err.code)
def fetch_and_save_service_config(args, token, version, filename):
try:
# build request url
service_mgmt_url = SERVICE_MGMT_URL_TEMPLATE.format(args.management,
args.service,
version)
# Validate service config if we have service name and version
logging.info("Fetching the service configuration "\
"from the service management service")
fetch_and_save_service_config_url(args, token, service_mgmt_url, filename)
except fetch.FetchError as err:
logging.error(err.message)
sys.exit(err.code)
# config_id might have invalid character for file name.
def generate_service_config_filename(version):
return str(uuid.uuid5(uuid.NAMESPACE_DNS, str(version)))
# parse xff_trusted_proxy_list
def handle_xff_trusted_proxies(args):
args.xff_trusted_proxies = []
if args.xff_trusted_proxy_list is not None:
for proxy in args.xff_trusted_proxy_list.split(","):
proxy = proxy.strip()
if proxy:
args.xff_trusted_proxies.append(proxy)
def fetch_service_config(args):
args.service_configs = {};
args.rollout_id = ""
try:
# Check service_account_key and non_gcp
if args.non_gcp and args.service_account_key is None:
logging.error("If --non_gcp is specified, --service_account_key has to be specified");
sys.exit(3)
# Get the access token
if args.service_account_key is None:
logging.info("Fetching an access token from the metadata service")
token = fetch.fetch_access_token(args.metadata)
else:
token = fetch.make_access_token(args.service_account_key)
if args.service_config_url is not None:
# Set the file name to "service.json", if either service
# config url or version is specified for backward compatibility
filename = "service.json"
fetch_and_save_service_config_url(args, token, args.service_config_url, filename)
args.service_configs[args.config_dir + "/" + filename] = 100;
else:
# fetch service name, if not specified
if (args.service is None or not args.service.strip()) and args.check_metadata:
logging.info(
"Fetching the service name from the metadata service")
args.service = fetch.fetch_service_name(args.metadata)
# if service name is not specified, display error message and exit
if args.service is None:
if args.check_metadata:
logging.error("Unable to fetch service name from the metadata service");
else:
logging.error("Service name is not specified");
sys.exit(3)
# fetch service config rollout strategy from metadata, if not specified
if (args.rollout_strategy is None or not args.rollout_strategy.strip()) and args.check_metadata:
logging.info(
"Fetching the service config rollout strategy from the metadata service")
args.rollout_strategy = \
fetch.fetch_service_config_rollout_strategy(args.metadata);
if args.rollout_strategy is None or not args.rollout_strategy.strip():
args.rollout_strategy = DEFAULT_ROLLOUT_STRATEGY
# fetch service config ID, if not specified
if (args.version is None or not args.version.strip()) and args.check_metadata:
logging.info("Fetching the service config ID "\
"from the metadata service")
args.version = fetch.fetch_service_config_id(args.metadata)
# Fetch api version from latest successful rollouts
if args.version is None or not args.version.strip():
logging.info(
"Fetching the service config ID from the rollouts service")
rollout = fetch.fetch_latest_rollout(args.management,
args.service, token)
args.rollout_id = rollout["rolloutId"]
for version, percentage in rollout["trafficPercentStrategy"]["percentages"].iteritems():
filename = generate_service_config_filename(version)
fetch_and_save_service_config(args, token, version, filename)
args.service_configs[args.config_dir + "/" + filename] = percentage;
else:
# Set the file name to "service.json", if either service
# config url or version is specified for backward compatibility
filename = "service.json"
fetch_and_save_service_config(args, token, args.version, filename)
args.service_configs[args.config_dir + "/" + filename] = 100;
except fetch.FetchError as err:
logging.error(err.message)
sys.exit(err.code)
def make_ingress(args):
ports = []
# Set port by default
if (args.http_port is None and
args.http2_port is None and
args.ssl_port is None):
args.http_port = DEFAULT_PORT
# Check for port collisions
collisions = Counter([
args.http_port, args.http2_port,
args.ssl_port, args.status_port])
collisions.pop(None, 0)
if len(collisions) > 0:
shared_port, count = collisions.most_common(1)[0]
if count > 1:
logging.error("Port " + str(shared_port) + " is used more than once.")
sys.exit(2)
if args.http_port is not None:
ports.append(Port(args.http_port, "http"))
if args.http2_port is not None:
ports.append(Port(args.http2_port, "http2"))
if args.ssl_port is not None:
ports.append(Port(args.ssl_port, "ssl"))
if args.backend.startswith(GRPC_PREFIX):
proto = "grpc"
backends = [args.backend[len(GRPC_PREFIX):]]
elif args.backend.startswith(HTTP_PREFIX):
proto = "http"
backends = [args.backend[len(HTTP_PREFIX):]]
elif args.backend.startswith(HTTPS_PREFIX):
proto = "https"
backend = args.backend[len(HTTPS_PREFIX):]
if not re.search(r':[0-9]+$', backend):
backend = backend + ':443'
backends = [backend]
else:
proto = "http"
backends = [args.backend]
locations = [Location(
path='/',
backends=backends,
proto=proto)]
ingress = Ingress(
ports=ports,
host='""',
locations=locations)
return ingress
class ArgumentParser(argparse.ArgumentParser):
def error(self, message):
self.print_help(sys.stderr)
self.exit(4, '%s: error: %s\n' % (self.prog, message))
def make_argparser():
parser = ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description = '''
ESP start-up script. This script fetches the service configuration from the
service management service and configures ESP to expose the specified ports and
proxy requests to the specified backend.
The service name and config ID are optional. If not supplied, the script
fetches the service name and the config ID from the metadata service as
attributes "{service_name}" and "{service_config_id}".
ESP relies on the metadata service to fetch access tokens for Google services.
If you deploy ESP outside of Google Cloud environment, you need to provide a
service account credentials file by setting {creds_key}
environment variable or by passing "-k" flag to this script.
If a custom nginx config file is provided ("-n" flag), the script launches ESP
with the provided config file. Otherwise, the script uses the exposed ports
("-p", "-P", "-S", "-N" flags) and the backend ("-a" flag) to generate an nginx
config file.'''.format(
service_name = fetch._METADATA_SERVICE_NAME,
service_config_id = fetch._METADATA_SERVICE_CONFIG_ID,
creds_key = GOOGLE_CREDS_KEY
))
parser.add_argument('-k', '--service_account_key', help=''' Use the service
account key JSON file to access the service control and the service
management. You can also set {creds_key} environment
variable to the location of the service account credentials JSON file. If
the option is omitted, ESP contacts the metadata service to fetch an access
token. '''.format(creds_key = GOOGLE_CREDS_KEY))
parser.add_argument('-s', '--service', help=''' Set the name of the
Endpoints service. If omitted and -c not specified, ESP contacts the
metadata service to fetch the service name. ''')
parser.add_argument('-v', '--version', help=''' Set the service config ID of
the Endpoints service. If omitted and -c not specified, ESP contacts the
metadata service to fetch the service config ID. ''')
parser.add_argument('-n', '--nginx_config', help=''' Use a custom nginx
config file instead of the config template {template}. If you specify this
option, then all the port options are ignored.
'''.format(template=NGINX_CONF_TEMPLATE))
parser.add_argument('-p', '--http_port', default=None, type=int, help='''
Expose a port to accept HTTP/1.x connections. By default, if you do not
specify any of the port options (-p, -P, and -S), then port {port} is
exposed as HTTP/1.x port. However, if you specify any of the port options,
then only the ports you specified are exposed, which may or may not include
HTTP/1.x port. '''.format(port=DEFAULT_PORT))
parser.add_argument('-P', '--http2_port', default=None, type=int, help='''
Expose a port to accept HTTP/2 connections. Note that this cannot be the
same port as HTTP/1.x port. ''')
parser.add_argument('-S', '--ssl_port', default=None, type=int, help='''
Expose a port for HTTPS requests. Accepts both HTTP/1.x and HTTP/2
secure connections. Requires the certificate and key files
/etc/nginx/ssl/nginx.crt and /etc/nginx/ssl/nginx.key''')
parser.add_argument('-N', '--status_port', default=DEFAULT_STATUS_PORT,
type=int, help=''' Change the ESP status port. Status information is
available at /endpoints_status location over HTTP/1.x. Default value:
{port}.'''.format(port=DEFAULT_STATUS_PORT))
parser.add_argument('-a', '--backend', default=DEFAULT_BACKEND, help='''
Change the application server address to which ESP proxies the requests.
Default value: {backend}. For HTTPS backends, please use "https://" prefix,
e.g. https://127.0.0.1:8081. For HTTP/1.x backends, prefix "http://" is
optional. For GRPC backends, please use "grpc://" prefix,
e.g. grpc://127.0.0.1:8081.'''.format(backend=DEFAULT_BACKEND))
parser.add_argument('-t', '--tls_mutual_auth', action='store_true', help='''
Enable TLS mutual authentication for HTTPS backends.
Default value: Not enabled. Please provide the certificate and key files
/etc/nginx/ssl/backend.crt and /etc/nginx/ssl/backend.key.''')
parser.add_argument('-c', '--service_config_url', default=None, help='''
Use the specified URL to fetch the service configuration instead of using
the default URL template
{template}.'''.format(template=SERVICE_MGMT_URL_TEMPLATE))
parser.add_argument('-z', '--healthz', default=None, help='''Define a
health checking endpoint on the same ports as the application backend. For
example, "-z healthz" makes ESP return code 200 for location "/healthz",
instead of forwarding the request to the backend. Default: not used.''')
parser.add_argument('-R', '--rollout_strategy',
default=None,
help='''The service config rollout strategy, [fixed|managed],
Default value: {strategy}'''.format(strategy=DEFAULT_ROLLOUT_STRATEGY),
choices=['fixed', 'managed'])
parser.add_argument('-x', '--xff_trusted_proxy_list',
default=DEFAULT_XFF_TRUSTED_PROXY_LIST,
help='''Comma separated list of trusted proxy for X-Forwarded-For
header, Default value: {xff_trusted_proxy_list}'''.
format(xff_trusted_proxy_list=DEFAULT_XFF_TRUSTED_PROXY_LIST))
parser.add_argument('--check_metadata', action='store_true',
help='''Enable fetching access token, service name, service config ID
and rollout strategy from the metadata service''')
parser.add_argument('--underscores_in_headers', action='store_true',
help='''Allow headers contain underscores to pass through by setting
"underscores_in_headers on;" directive.
''')
parser.add_argument('--allow_invalid_headers', action='store_true',
help='''Allow "invalid" headers by adding "ignore_invalid_headers off;"
directive. This is required to support all legal characters specified
in RFC 7230.
''')
parser.add_argument('--enable_websocket', action='store_true',
help='''Enable nginx WebSocket support.
''')
parser.add_argument('--generate_self_signed_cert', action='store_true',
help='''Generate a self-signed certificate and key at start, then
store them in /etc/nginx/ssl/nginx.crt and /etc/nginx/ssl/nginx.key.
This is useful when only a random self-sign cert is needed to serve
HTTPS requests. Generated certificate will have Common Name
"localhost" and valid for 10 years.
''')
parser.add_argument('--client_max_body_size', default='32m', help='''
Sets the maximum allowed size of the client request body, specified
in the "Content-Length" request header field. If the size in a request
exceeds the configured value, the 413 (Request Entity Too Large) error
is returned to the client. Please be aware that browsers cannot correctly
display this error. Setting size to 0 disables checking of client request
body size.''')
parser.add_argument('--client_body_buffer_size', default='128k', help='''
Sets buffer size for reading client request body. In case the request
body is larger than the buffer, the whole body or only its part is
written to a temporary file.''')
parser.add_argument('--rewrite', action='append', help=
'''Internally redirect the request uri with a pair of pattern and
replacement. Pattern and replacement should be separated by whitespace.
If the request uri matches perl compatible regular expression,
then the request uri will be replaced with the replacement.
pattern and replacement follow the rewrite function of Module
ngx_http_rewrite_module except flag.
http://nginx.org/en/docs/http/ngx_http_rewrite_module.html#rewrite
The "rewrite" argument can be repeat multiple times. Rewrite rules are
executed sequentially in the order of arguments.
ex.
--rewrite "/apis/shelves\\\\?id=(.*)&key=(.*) /shelves/\$1?key=\$2"
--rewrite "^/api/v1/view/(.*) /view/\$1"
''')
parser.add_argument('--worker_processes', default=DEFAULT_WORKER_PROCESSES,
help='''Value for nginx "worker_processes". Each worker is a single process
with no additional threads, so scale this if you will receive more load
than a single CPU can handle. Use `auto` to automatically set to the number
of CPUs available, but be aware that containers may be limited to less than
that of their host. Also, the ESP cache to Service Control is per-worker,
so keep this value as low as possible.
''')
# Specify a custom service.json path.
# If this is specified, service json will not be fetched.
parser.add_argument('--service_json_path',
default=None,
help=argparse.SUPPRESS)
# Customize metadata service url prefix.
parser.add_argument('-m', '--metadata',
default=METADATA_ADDRESS,
help=argparse.SUPPRESS)
# Customize management service url prefix.
parser.add_argument('-g', '--management',
default=MANAGEMENT_ADDRESS,
help=argparse.SUPPRESS)
# Customize servicecontrol url prefix.
parser.add_argument('--service_control_url_override',
default=None,
help=argparse.SUPPRESS)
# Fetched service config and generated nginx config are placed
# into config_dir as service.json and nginx.conf files
parser.add_argument('--config_dir',
default=CONFIG_DIR,
help=argparse.SUPPRESS)
# nginx.conf template
parser.add_argument('--template',
default=NGINX_CONF_TEMPLATE,
help=argparse.SUPPRESS)
# nginx.conf template
parser.add_argument('--server_config_template',
default=SERVER_CONF_TEMPLATE,
help=argparse.SUPPRESS)
# nginx binary location
parser.add_argument('--nginx',
default=NGINX,
help=argparse.SUPPRESS)
# Address of the DNS resolver used by nginx http.cc
parser.add_argument('--dns',
default=DNS_RESOLVER,
help=argparse.SUPPRESS)
# Access log destination. Use special value 'off' to disable.
parser.add_argument('--access_log',
default='/dev/stdout',
help=argparse.SUPPRESS)
# PID file location.
parser.add_argument('--pid_file',
default=DEFAULT_PID_FILE,
help=argparse.SUPPRESS)
# always_print_primitive_fields.
parser.add_argument('--transcoding_always_print_primitive_fields',
action='store_true',
help=argparse.SUPPRESS)
parser.add_argument('--client_ip_header', default=None, help='''
Defines the HTTP header name to extract client IP address.''')
parser.add_argument('--client_ip_position', default=0, help='''
Defines the position of the client IP address. The default value is 0.
The index usage is the same as the array index in many languages,
such as Python. This flag is only applied when --client_ip_header is
specified.''')
# CORS presets
parser.add_argument('--cors_preset',
default=None,
help='''
Enables setting of CORS headers. This is useful when using a GRPC
backend, since a GRPC backend cannot set CORS headers.
Specify one of available presets to configure CORS response headers
in nginx. Defaults to no preset and therefore no CORS response
headers. If no preset is suitable for the use case, use the
--nginx_config arg to use a custom nginx config file.
Available presets:
- basic - Assumes all location paths have the same CORS policy.
Responds to preflight OPTIONS requests with an empty 204, and the
results of preflight are allowed to be cached for up to 20 days
(1728000 seconds). See descriptions for args --cors_allow_origin,
--cors_allow_methods, --cors_allow_headers, --cors_expose_headers,
--cors_allow_credentials for more granular configurations.
- cors_with_regex - Same as basic preset, except that specifying
allowed origins in regular expression. See descriptions for args
--cors_allow_origin_regex, --cors_allow_methods,
--cors_allow_headers, --cors_expose_headers, --cors_allow_credentials
for more granular configurations.
''')
parser.add_argument('--cors_allow_origin',
default='*',
help='''
Only works when --cors_preset is 'basic'. Configures the CORS header
Access-Control-Allow-Origin. Defaults to "*" which allows all origins.
''')
parser.add_argument('--cors_allow_origin_regex',
default='',
help='''
Only works when --cors_preset is 'cors_with_regex'. Configures the
whitelists of CORS header Access-Control-Allow-Origin with regular
expression.
''')
parser.add_argument('--cors_allow_methods',
default='GET, POST, PUT, PATCH, DELETE, OPTIONS',
help='''
Only works when --cors_preset is in use. Configures the CORS header
Access-Control-Allow-Methods. Defaults to allow common HTTP
methods.
''')
parser.add_argument('--cors_allow_headers',
default='DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization',
help='''
Only works when --cors_preset is in use. Configures the CORS header
Access-Control-Allow-Headers. Defaults to allow common HTTP
headers.
''')
parser.add_argument('--cors_allow_credentials', action='store_true',
help='''
Only works when --cors_preset is in use. Enable the CORS header
Access-Control-Allow-Credentials. By default, this header is disabled.
''')
parser.add_argument('--cors_expose_headers',
default='Content-Length,Content-Range',
help='''
Only works when --cors_preset is in use. Configures the CORS header
Access-Control-Expose-Headers. Defaults to allow common response headers.
''')
parser.add_argument('--non_gcp', action='store_true',
help='''
By default, ESP tries to talk to GCP metadata server to get VM
        location in the first few requests. Set this flag to true to skip
this step.
''')
parser.add_argument('--disable_cloud_trace_auto_sampling', action='store_true',
help='''
Disable cloud trace auto sampling. By default, 1 request out of every
1000 or 1 request out of every 10 seconds is enabled with cloud trace.
Set this flag to false to disable such auto sampling. Cloud trace can
still be enabled from request HTTP headers with trace context regardless
this flag value.
''')
parser.add_argument('--ssl_protocols',
default=None, action='append', help='''
Enable the specified SSL protocols. Please refer to
https://nginx.org/en/docs/http/ngx_http_ssl_module.html#ssl_protocols.
The "ssl_protocols" argument can be repeated multiple times to specify multiple
SSL protocols (e.g., --ssl_protocols=TLSv1.1 --ssl_protocols=TLSv1.2).
''')
parser.add_argument('--generate_config_file_only', action='store_true',
help='''Only generate the nginx config file without running ESP. This option is
for testing that the generated nginx config file is as expected.
''')
parser.add_argument('--server_config_generation_path',
default=None, help='''
Define where to write the server configuration file. This option only works when
--generate_config_file_only is used. When --generate_config_file_only is used but
--server_config_generation_path is absent, the server configuration file generation
is skipped.
''')
# Customize cloudtrace service url prefix.
parser.add_argument('--cloud_trace_url_override',
default=None,
help=argparse.SUPPRESS)
return parser
if __name__ == '__main__':
parser = make_argparser()
args = parser.parse_args()
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if args.generate_config_file_only:
if args.nginx_config:
logging.error("--nginx_config is not allowed when --generate_config_file_only")
sys.exit(3)
# Set credentials file from the environment variable
if args.service_account_key is None:
if GOOGLE_CREDS_KEY in os.environ:
args.service_account_key = os.environ[GOOGLE_CREDS_KEY]
# Write pid file for the supervising process
write_pid_file(args)
# Handles IP addresses of trusted proxies
handle_xff_trusted_proxies(args)
# Get service config
if args.service_json_path:
args.rollout_id = ''
assert_file_exists(args.service_json_path)
args.service_configs = {args.service_json_path: 100}
else:
# Fetch service config and place it in the standard location
ensure(args.config_dir)
if not args.generate_config_file_only:
fetch_service_config(args)
# Generate server_config
if args.generate_config_file_only:
if args.server_config_generation_path is None:
logging.error("when --generate_config_file_only, must specify --server_config_generation_path")
sys.exit(3)
else:
write_server_config_template(args.server_config_generation_path, args)
else:
write_server_config_template(SERVER_CONF, args)
# Generate nginx config if not specified
nginx_conf = args.nginx_config
if nginx_conf is None:
ingress = make_ingress(args)
nginx_conf = args.config_dir + "/nginx.conf"
ensure(args.config_dir)
write_template(ingress, nginx_conf, args)
if args.generate_config_file_only:
exit(0)
# Generate self-signed cert if needed
if args.generate_self_signed_cert:
if not os.path.exists("/etc/nginx/ssl"):
os.makedirs("/etc/nginx/ssl")
logging.info("Generating self-signed certificate...")
os.system(("openssl req -x509 -newkey rsa:2048"
" -keyout /etc/nginx/ssl/nginx.key -nodes"
" -out /etc/nginx/ssl/nginx.crt"
' -days 3650 -subj "/CN=localhost"'))
# Start NGINX
start_nginx(args.nginx, nginx_conf)
| cloudendpoints/endpoints-tools | obsoleted_start_esp/start_esp.py | Python | apache-2.0 | 32,618 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2018-08-13
git sha : $Format:%H$
copyright : (C) 2018 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt5.QtCore import QCoreApplication
from DsgTools.core.GeometricTools.geometryHandler import GeometryHandler
from qgis.core import (QgsDataSourceUri, QgsFeature, QgsFeatureSink,
QgsProcessing, QgsProcessingAlgorithm,
QgsProcessingOutputVectorLayer,
QgsProcessingParameterBoolean,
QgsProcessingParameterFeatureSink,
QgsProcessingParameterFeatureSource,
QgsProcessingParameterNumber,
QgsProcessingParameterVectorLayer, QgsWkbTypes,
QgsProcessingException)
from .validationAlgorithm import ValidationAlgorithm
class IdentifyAnglesInInvalidRangeAlgorithm(ValidationAlgorithm):
FLAGS = 'FLAGS'
INPUT = 'INPUT'
SELECTED = 'SELECTED'
MIN_ANGLE = 'MIN_ANGLE'
MAX_ANGLE = 'MAX_ANGLE'
def initAlgorithm(self, config):
"""
Parameter setting.
"""
self.addParameter(
QgsProcessingParameterVectorLayer(
self.INPUT,
self.tr('Input layer'),
[QgsProcessing.TypeVectorLine, QgsProcessing.TypeVectorPolygon]
)
)
self.addParameter(
QgsProcessingParameterBoolean(
self.SELECTED,
self.tr('Process only selected features')
)
)
self.addParameter(
QgsProcessingParameterNumber(
self.MIN_ANGLE,
self.tr('Minimum angle'),
minValue=0,
defaultValue=80
)
)
self.addParameter(
QgsProcessingParameterNumber(
self.MAX_ANGLE,
self.tr('Maximum angle'),
minValue=0,
defaultValue=100
)
)
self.addParameter(
QgsProcessingParameterFeatureSink(
self.FLAGS,
self.tr('{0} Flags').format(self.displayName())
)
)
def processAlgorithm(self, parameters, context, feedback):
"""
Here is where the processing itself takes place.
"""
geometryHandler = GeometryHandler()
inputLyr = self.parameterAsVectorLayer(parameters, self.INPUT, context)
if inputLyr is None:
raise QgsProcessingException(self.invalidSourceError(parameters, self.INPUT))
onlySelected = self.parameterAsBool(parameters, self.SELECTED, context)
minAngle = self.parameterAsDouble(parameters, self.MIN_ANGLE, context)
maxAngle = self.parameterAsDouble(parameters, self.MAX_ANGLE, context)
if maxAngle <= minAngle:
raise QgsProcessingException(self.tr('Invalid Range'))
self.prepareFlagSink(parameters, inputLyr, QgsWkbTypes.Point, context)
# Compute the number of steps to display within the progress bar and
# get features from source
featureList, total = self.getIteratorAndFeatureCount(inputLyr, onlySelected = onlySelected)
for current, feat in enumerate(featureList):
# Stop the algorithm if cancel button has been clicked
if feedback.isCanceled():
break
outOfBoundsList = geometryHandler.getOutOfBoundsAngle(feat, 0, invalidRange=[minAngle, maxAngle])
if outOfBoundsList:
for item in outOfBoundsList:
flagText = self.tr('Feature from layer {0} with id={1} has angle of value {2} degrees, which is in invalid interval [{3},{4}].').format(inputLyr.name(), item['feat_id'], item['angle'], minAngle, maxAngle)
self.flagFeature(item['geom'], flagText)
# Update the progress bar
feedback.setProgress(int(current * total))
return {self.FLAGS: self.flag_id}
def name(self):
"""
Returns the algorithm name, used for identifying the algorithm. This
string should be fixed for the algorithm, and must not be localised.
The name should be unique within each provider. Names should contain
lowercase alphanumeric characters only and no spaces or other
formatting characters.
"""
return 'identifyanglesininvalidrangealgorithm'
def displayName(self):
"""
Returns the translated algorithm name, which should be used for any
user-visible display of the algorithm name.
"""
return self.tr('Identify Angles in Invalid Range')
def group(self):
"""
Returns the name of the group this algorithm belongs to. This string
should be localised.
"""
return self.tr('Quality Assurance Tools (Identification Processes)')
def groupId(self):
"""
Returns the unique ID of the group this algorithm belongs to. This
string should be fixed for the algorithm, and must not be localised.
The group id should be unique within each provider. Group id should
contain lowercase alphanumeric characters only and no spaces or other
formatting characters.
"""
return 'DSGTools: Quality Assurance Tools (Identification Processes)'
def tr(self, string):
return QCoreApplication.translate('IdentifyAnglesInInvalidRangeAlgorithm', string)
def createInstance(self):
return IdentifyAnglesInInvalidRangeAlgorithm()
| lcoandrade/DsgTools | core/DSGToolsProcessingAlgs/Algs/ValidationAlgs/identifyAnglesInInvalidRangeAlgorithm.py | Python | gpl-2.0 | 6,643 |
import psycogreen.gevent
import functools
import json
import hug.interface
import hug._empty as empty
from cgi import parse_header
from collections import OrderedDict
from hug.types import text
original_wraps = functools.wraps
original_gather_parameters = hug.interface.HTTP.gather_parameters
original_call_function = hug.interface.HTTP.call_function
def wraps(function):
"""Enables building decorators around functions used for hug routes without chaninging their function signature"""
def wrap(decorator):
decorator = original_wraps(function)(decorator)
if not hasattr(function, 'original'):
decorator.original = function
else:
decorator.original = function.original
return decorator
return wrap
def call_function(self, **parameters):
"""Let request/response got by interface, even the interface has no kwargs.
:param self:
:param parameters:
:return:
"""
# Mock the function parameters and add request into it
self.all_parameters = tuple({'request', 'response', *self.all_parameters})
return original_call_function(self, **parameters)
def gather_parameters(self, request, response, api_version=None, **input_parameters):
"""Gathers and returns all parameters that will be used for this endpoint"""
# Mock the function parameters and add request into it
self.all_parameters = tuple({'request', 'response', *self.all_parameters})
return original_gather_parameters(self, request, response, api_version, **input_parameters)
def patch():
psycogreen.gevent.patch_psycopg()
functools.wraps = wraps
# hug.interface.Interface.documentation = documentation
hug.interface.HTTP.gather_parameters = gather_parameters
hug.interface.HTTP.call_function = call_function
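# Typical usage sketch (illustrative; assumes this module is importable as
# ``makiki.monkey``): call patch() once at start-up, before hug routes are
# defined, so that hug decorates them with the patched functools.wraps.
#
#   from makiki.monkey import patch
#   patch()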
| faith0811/makiki | makiki/monkey.py | Python | mit | 1,806 |
# minifileset.py - a simple language to select files
#
# Copyright 2017 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
from .i18n import _
from . import (
error,
fileset,
filesetlang,
pycompat,
)
def _sizep(x):
# i18n: "size" is a keyword
expr = filesetlang.getstring(x, _(b"size requires an expression"))
return fileset.sizematcher(expr)
def _compile(tree):
if not tree:
raise error.ParseError(_(b"missing argument"))
op = tree[0]
if op == b'withstatus':
return _compile(tree[1])
elif op in {b'symbol', b'string', b'kindpat'}:
name = filesetlang.getpattern(
tree, {b'path'}, _(b'invalid file pattern')
)
if name.startswith(b'**'): # file extension test, ex. "**.tar.gz"
ext = name[2:]
for c in pycompat.bytestr(ext):
if c in b'*{}[]?/\\':
raise error.ParseError(_(b'reserved character: %s') % c)
return lambda n, s: n.endswith(ext)
elif name.startswith(b'path:'): # directory or full path test
p = name[5:] # prefix
pl = len(p)
f = lambda n, s: n.startswith(p) and (
len(n) == pl or n[pl : pl + 1] == b'/'
)
return f
raise error.ParseError(
_(b"unsupported file pattern: %s") % name,
hint=_(b'paths must be prefixed with "path:"'),
)
elif op in {b'or', b'patterns'}:
funcs = [_compile(x) for x in tree[1:]]
return lambda n, s: any(f(n, s) for f in funcs)
elif op == b'and':
func1 = _compile(tree[1])
func2 = _compile(tree[2])
return lambda n, s: func1(n, s) and func2(n, s)
elif op == b'not':
return lambda n, s: not _compile(tree[1])(n, s)
elif op == b'func':
symbols = {
b'all': lambda n, s: True,
b'none': lambda n, s: False,
b'size': lambda n, s: _sizep(tree[2])(s),
}
name = filesetlang.getsymbol(tree[1])
if name in symbols:
return symbols[name]
raise error.UnknownIdentifier(name, symbols.keys())
elif op == b'minus': # equivalent to 'x and not y'
func1 = _compile(tree[1])
func2 = _compile(tree[2])
return lambda n, s: func1(n, s) and not func2(n, s)
elif op == b'list':
raise error.ParseError(
_(b"can't use a list in this context"),
hint=_(b'see \'hg help "filesets.x or y"\''),
)
raise error.ProgrammingError(b'illegal tree: %r' % (tree,))
def compile(text):
"""generate a function (path, size) -> bool from filter specification.
"text" could contain the operators defined by the fileset language for
common logic operations, and parenthesis for grouping. The supported path
tests are '**.extname' for file extension test, and '"path:dir/subdir"'
for prefix test. The ``size()`` predicate is borrowed from filesets to test
file size. The predicates ``all()`` and ``none()`` are also supported.
'(**.php & size(">10MB")) | **.zip | (path:bin & !path:bin/README)' for
example, will catch all php files whose size is greater than 10 MB, all
files whose name ends with ".zip", and all files under "bin" in the repo
root except for "bin/README".
"""
tree = filesetlang.parse(text)
tree = filesetlang.analyze(tree)
tree = filesetlang.optimize(tree)
return _compile(tree)
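# A minimal usage sketch (illustrative only, not part of Mercurial): compile()
# returns a predicate that takes a byte-string path and a size in bytes, e.g.
#
#     matcher = compile(b'(**.php & size(">10MB")) | path:bin')
#     matcher(b'src/index.php', 20 * 1024 * 1024)   # True: large .php file
#     matcher(b'src/index.php', 1024)               # False: .php but too small
#     matcher(b'bin/README', 10)                    # True: under "bin"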
| smmribeiro/intellij-community | plugins/hg4idea/testData/bin/mercurial/minifileset.py | Python | apache-2.0 | 3,620 |
import os
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django_jinja.backend import Jinja2
from mock import ANY, patch
from lib import l10n_utils
ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'test_files')
TEMPLATE_DIRS = (os.path.join(ROOT, 'templates'),)
jinja_env = Jinja2.get_default()
@patch.object(jinja_env.env.loader, 'searchpath', TEMPLATE_DIRS)
@override_settings(
ROOT=ROOT,
DEV=False,
ROOT_URLCONF='lib.l10n_utils.tests.test_files.urls',
)
class TestRender(TestCase):
def _test(self, path, template, locale, accept_lang, status, destination=None,
active_locales=None, add_active_locales=None):
request = RequestFactory().get(path)
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_lang
request.locale = locale
ctx = {}
if active_locales:
ctx['active_locales'] = active_locales
if add_active_locales:
ctx['add_active_locales'] = add_active_locales
response = l10n_utils.render(request, template, ctx)
if status == 302:
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], destination)
self.assertEqual(response['Vary'], 'Accept-Language')
else:
self.assertEqual(response.status_code, 200)
def test_firefox(self):
path = '/firefox/new/'
template = 'firefox/new.html'
locales = ['en-US', 'en-GB', 'fr', 'es-ES']
# Nothing to do with a valid locale
self._test(path, template, 'en-US', 'en-us,en;q=0.5',
200, active_locales=locales)
# en-GB is activated on /firefox/new/
self._test(path, template, 'en-GB', 'en-gb,en;q=0.5',
200, active_locales=locales)
# fr-FR should be treated as fr
self._test(path, template, 'fr-FR', 'fr-fr',
302, '/fr/firefox/new/', active_locales=locales)
# Should fallback to the user's second preferred language
self._test(path, template, 'zu', 'zu,fr;q=0.7,en;q=0.3',
302, '/fr/firefox/new/', active_locales=locales)
# Should fallback to one of the site's fallback languages
self._test(path, template, 'es-CL', 'es-CL,es;q=0.7,en;q=0.3',
302, '/es-ES/firefox/new/', active_locales=locales)
@patch.object(l10n_utils, 'translations_for_template')
def test_add_active_locales(self, tft_mock):
path = '/firefox/new/'
template = 'firefox/new.html'
locales = ['en-US', 'en-GB']
tft_mock.return_value = ['fr', 'es-ES']
# expect same results as above, but with locales from different sources
# Nothing to do with a valid locale
self._test(path, template, 'en-US', 'en-us,en;q=0.5',
200, add_active_locales=locales)
# en-GB is activated on /firefox/new/
self._test(path, template, 'en-GB', 'en-gb,en;q=0.5',
200, add_active_locales=locales)
# fr-FR should be treated as fr
self._test(path, template, 'fr-FR', 'fr-fr',
302, '/fr/firefox/new/', add_active_locales=locales)
# Should fallback to the user's second preferred language
self._test(path, template, 'zu', 'zu,fr;q=0.7,en;q=0.3',
302, '/fr/firefox/new/', add_active_locales=locales)
# Should fallback to one of the site's fallback languages
self._test(path, template, 'es-CL', 'es-CL,es;q=0.7,en;q=0.3',
302, '/es-ES/firefox/new/', add_active_locales=locales)
def test_ftl_files_unmodified(self):
"""A list passed to the ftl_files parameter should not be modified in place"""
ftl_files = ['dude', 'walter']
path = '/firefox/new/'
template = 'firefox/new.html'
req = RequestFactory().get(path)
l10n_utils.render(req, template, ftl_files=ftl_files)
assert ftl_files == ['dude', 'walter']
class TestGetAcceptLanguages(TestCase):
def _test(self, accept_lang, list):
request = RequestFactory().get('/')
request.META['HTTP_ACCEPT_LANGUAGE'] = accept_lang
self.assertEqual(l10n_utils.get_accept_languages(request), list)
def test_valid_lang_codes(self):
"""
Should return a list of valid lang codes
"""
self._test('fr-FR', ['fr'])
self._test('en-us,en;q=0.5', ['en-US', 'en'])
self._test('pt-pt,fr;q=0.8,it-it;q=0.5,de;q=0.3',
['pt-PT', 'fr', 'it', 'de'])
self._test('ja-JP-mac,ja-JP;q=0.7,ja;q=0.3', ['ja'])
self._test('foo,bar;q=0.5', ['foo', 'bar'])
def test_invalid_lang_codes(self):
"""
Should return a list of valid lang codes or an empty list
"""
self._test('', [])
self._test('en_us,en*;q=0.5', [])
self._test('Chinese,zh-cn;q=0.5', ['zh-CN'])
@patch.object(l10n_utils, 'render')
class TestL10nTemplateView(TestCase):
def setUp(self):
self.req = RequestFactory().get('/')
def test_ftl_files(self, render_mock):
view = l10n_utils.L10nTemplateView.as_view(template_name='dude.html',
ftl_files='dude')
view(self.req)
render_mock.assert_called_with(self.req, ['dude.html'], ANY, ftl_files='dude')
def test_ftl_files_map(self, render_mock):
view = l10n_utils.L10nTemplateView.as_view(template_name='dude.html',
ftl_files_map={'dude.html': 'dude'})
view(self.req)
render_mock.assert_called_with(self.req, ['dude.html'], ANY, ftl_files='dude')
# no match means no FTL files
view = l10n_utils.L10nTemplateView.as_view(template_name='dude.html',
ftl_files_map={'donny.html': 'donny'})
view(self.req)
render_mock.assert_called_with(self.req, ['dude.html'], ANY, ftl_files=None)
| ericawright/bedrock | lib/l10n_utils/tests/test_base.py | Python | mpl-2.0 | 6,090 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideaworks.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| dstl/ideaworks | backend/ideaworks/manage.py | Python | agpl-3.0 | 272 |
#coding=utf-8
import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
"""子弹配置类"""
def __init__(self, ai_settings, screen, ship):
        # Create a bullet object at the ship's current position
super(Bullet, self).__init__()
self.screen = screen
        # Create a bullet rect at (0, 0), then set the correct position
self.rect = pygame.Rect(0, 0, ai_settings.bullet_width,
ai_settings.bullet_height)
self.rect.centerx = ship.rect.centerx
self.rect.top = ship.rect.top
        # Store the bullet's position as a decimal value
self.y = float(self.rect.y)
self.color = ai_settings.bullet_color
self.speed_factor = ai_settings.bullet_speed_factor
def update(self):
        # Move the bullet up the screen
        # Update the decimal value representing the bullet's position
self.y -= self.speed_factor
        # Update the bullet's rect position
self.rect.y = self.y
def draw_bullet(self):
        # Draw the bullet to the screen
pygame.draw.rect(self.screen, self.color, self.rect)
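# A minimal usage sketch (assumed game-loop code from the surrounding project,
# not part of this module): bullets are typically kept in a pygame.sprite.Group,
# created on a keypress, then updated and drawn every frame.
#
#     bullets = pygame.sprite.Group()
#     bullets.add(Bullet(ai_settings, screen, ship))   # on spacebar press
#
#     bullets.update()                                 # each frame
#     for bullet in bullets.copy():
#         if bullet.rect.bottom <= 0:
#             bullets.remove(bullet)                   # drop off-screen bullets
#     for bullet in bullets.sprites():
#         bullet.draw_bullet()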
| gavin20160901/Alien-invasion | bullet.py | Python | mit | 1,084 |
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from . import views
app_name = "users"
urlpatterns = [
url (r'^checkusers/$', views.UsersView.as_view(), name = 'checkusers'),
url (r'^register/$', views.RegisterView, name = 'register'),
url (r'^settings/$', views.SettingsView, name = 'settings'),
url (r'^login/$', auth_views.login, {'template_name': 'main/login.html'}, name = 'login'),
url (r'^logout/$', auth_views.logout, {'next_page': '/'}, name = 'logout'),
]
| AtenrevCode/scChat | users/urls.py | Python | mit | 524 |
"""Utilities for exploiting active subspaces in response surfaces."""
import numpy as np
import utils.designs as dn
from utils.simrunners import SimulationRunner
from utils.misc import conditional_expectations
from utils.response_surfaces import RadialBasisApproximation
from domains import UnboundedActiveVariableDomain, BoundedActiveVariableDomain, \
ActiveVariableMap
class ActiveSubspaceResponseSurface():
"""
A class for using active subspace with response surfaces.
:param ResponseSurface respsurf: `respsurf` is a
utils.response_surfaces.ResponseSurface.
:param ActiveVariableMap avmap: a domains.ActiveVariableMap.
**Notes**
This class has several convenient functions for training and using a
response surface with active subspaces. Note that the `avmap` must be
given. This means that the active subspace must be computed already.
"""
respsurf = None
avmap = None
def __init__(self, avmap, respsurf=None):
"""
Initialize an ActiveSubspaceResponseSurface.
        :param ActiveVariableMap avmap: A domains.ActiveVariableMap that
includes the active variable domain, which includes the active and
inactive subspaces.
:param ResponseSurface respsurf: A
utils.response_surfaces.ResponseSurface object. If a ResponseSurface
is not given, a default RadialBasisApproximation is used.
"""
if not isinstance(avmap, ActiveVariableMap):
raise TypeError('avmap should be an ActiveVariableMap.')
if respsurf == None:
self.respsurf = RadialBasisApproximation()
else:
self.respsurf = respsurf
self.avmap = avmap
def _train(self, Y, f, v=None):
"""
A private function for training the response surface with a set of
active variable and function evaluations.
"""
if isinstance(self.respsurf, RadialBasisApproximation):
evals = self.avmap.domain.subspaces.eigenvalues
self.respsurf.train(Y, f, v=v, e=evals)
else:
self.respsurf.train(Y, f)
def train_with_data(self, X, f, v=None):
"""
Train the response surface with input/output pairs.
:param ndarray X: M-by-m matrix with evaluations of the simulation
inputs.
:param ndarray f: M-by-1 matrix with corresponding simulation quantities
of interest.
:param ndarray v: M-by-1 matrix that contains the regularization
(i.e., errors) associated with `f`.
**Notes**
The training methods exploit the eigenvalues from the active subspace
analysis to determine length scales for each variable when tuning
the parameters of the radial bases.
The method sets attributes of the object for further use.
"""
Y = self.avmap.forward(X)[0]
self._train(Y, f, v=v)
def train_with_interface(self, fun, N, NMC=10):
"""
        Train the response surface by running the simulation interface at design points.
:param function fun: A function that returns the simulation quantity of
interest given a point in the input space as an 1-by-m ndarray.
:param int N: The number of points used in the design-of-experiments for
constructing the response surface.
:param int NMC: The number of points used to estimate the conditional
expectation and conditional variance of the function given a value
of the active variables.
**Notes**
The training methods exploit the eigenvalues from the active subspace
analysis to determine length scales for each variable when tuning
the parameters of the radial bases.
The method sets attributes of the object for further use.
The method uses the response_surfaces.av_design function to get the
design for the appropriate `avmap`.
"""
Y, X, ind = av_design(self.avmap, N, NMC=NMC)
if isinstance(self.avmap.domain, BoundedActiveVariableDomain):
X = np.vstack((X, self.avmap.domain.vertX))
Y = np.vstack((Y, self.avmap.domain.vertY))
il = np.amax(ind) + 1
iu = np.amax(ind) + self.avmap.domain.vertX.shape[0] + 1
iind = np.arange(il, iu)
ind = np.vstack(( ind, iind.reshape((iind.size,1)) ))
# run simulation interface at all design points
if isinstance(fun, SimulationRunner):
f = fun.run(X)
else:
f = SimulationRunner(fun).run(X)
Ef, Vf = conditional_expectations(f, ind)
self._train(Y, Ef, v=Vf)
def predict_av(self, Y, compgrad=False):
"""
Compute the value of the response surface given values of the active
variables.
:param ndarray Y: M-by-n matrix containing points in the range of active
variables to evaluate the response surface.
:param bool compgrad: Determines if the gradient of the response surface
with respect to the active variables is computed and returned.
:return: f, contains the response surface values at the given `Y`.
:rtype: ndarray
:return: df, Contains the response surface gradients at the given `Y`.
If `compgrad` is False, then `df` is None.
:rtype: ndarray
"""
f, df = self.respsurf.predict(Y, compgrad)
return f, df
def gradient_av(self, Y):
"""
A convenience function for computing the gradient of the response
surface with respect to the active variables.
:param ndarray Y: M-by-n matrix containing points in the range of active
variables to evaluate the response surface gradient.
:return: df, Contains the response surface gradient at the given `Y`.
:rtype: ndarray
"""
df = self.respsurf.predict(Y, compgrad=True)[1]
return df
def predict(self, X, compgrad=False):
"""
Compute the value of the response surface given values of the simulation
variables.
:param ndarray X: M-by-m matrix containing points in simulation's
parameter space.
:param bool compgrad: Determines if the gradient of the response surface
is computed and returned.
:return: f, Contains the response surface values at the given `X`.
:rtype: ndarray
:return: dfdx, An ndarray of shape M-by-m that contains the estimated
gradient at the given `X`. If `compgrad` is False, then `dfdx` is
None.
:rtype: ndarray
"""
Y = self.avmap.forward(X)[0]
f, dfdy = self.predict_av(Y, compgrad)
if compgrad:
W1 = self.avmap.domain.subspaces.W1
dfdx = np.dot(dfdy, W1.T)
else:
dfdx = None
return f, dfdx
def gradient(self, X):
"""
A convenience function for computing the gradient of the response
surface with respect to the simulation inputs.
:param ndarray X: M-by-m matrix containing points in the space of
simulation inputs.
:return: df, Contains the response surface gradient at the given `X`.
:rtype: ndarray
"""
return self.predict(X, compgrad=True)[1]
def __call__(self, X):
return self.predict(X)[0]
def av_design(avmap, N, NMC=10):
"""
A wrapper that returns the design for the response surface in the space of
the active variables.
    :param ActiveVariableMap avmap: A domains.ActiveVariableMap that includes
the active variable domain, which includes the active and inactive
subspaces.
:param int N: The number of points used in the design-of-experiments for
constructing the response surface.
:param int NMC: The number of points used to estimate the conditional
expectation and conditional variance of the function given a value
of the active variables. (Default is 10)
:return: Y, N-by-n matrix that contains the design points in the space of
active variables.
:rtype: ndarray
:return: X, (N*NMC)-by-m matrix that contains points in the simulation input
space to run the simulation.
:rtype: ndarray
:return: ind, Indices that map points in `X` to points in `Y`.
:rtype: ndarray
**See Also**
utils.designs.gauss_hermite_design
utils.designs.interval_design
utils.designs.maximin_design
"""
if not isinstance(avmap, ActiveVariableMap):
raise TypeError('avmap should be an ActiveVariableMap.')
# interpret N as total number of points in the design
if not isinstance(N, int):
raise Exception('N should be an integer.')
if not isinstance(NMC, int):
raise Exception('NMC should be an integer.')
m, n = avmap.domain.subspaces.W1.shape
if isinstance(avmap.domain, UnboundedActiveVariableDomain):
NN = [int(np.floor(np.power(N, 1.0/n))) for i in range(n)]
Y = dn.gauss_hermite_design(NN)
elif isinstance(avmap.domain, BoundedActiveVariableDomain):
if n==1:
a, b = avmap.domain.vertY[0,0], avmap.domain.vertY[1,0]
Y = dn.interval_design(a, b, N)
else:
vertices = avmap.domain.vertY
Y = dn.maximin_design(vertices, N)
else:
raise Exception('There is a problem with the avmap.domain.')
X, ind = avmap.inverse(Y, NMC)
return Y, X, ind
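# A minimal usage sketch (illustrative; `avmap` is an ActiveVariableMap built
# elsewhere from an active subspace analysis, `fun` maps a 1-by-m ndarray of
# simulation inputs to a scalar quantity of interest, and `X` is an M-by-m
# ndarray of points at which to evaluate the surrogate):
#
#     asrs = ActiveSubspaceResponseSurface(avmap)
#     asrs.train_with_interface(fun, N=50, NMC=10)   # design + simulation runs
#     f, dfdx = asrs.predict(X, compgrad=True)       # evaluate the surrogate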
| meyersw3476/active_subspaces | active_subspaces/response_surfaces.py | Python | mit | 9,553 |
# -*- coding: utf-8 -*-
'''
Runners Directory
'''
| MadeiraCloud/salt | sources/salt/runners/__init__.py | Python | apache-2.0 | 50 |
import click
from oktawave.api import CloneType, DICT as OktawaveConstants
from oktawave.commands.context import NamedItemParam, pass_context, OktawaveCliGroup, positional_option
from oktawave.commands.oci import clone_type_param, template_id_param, oci_class_param, subregion_param
from oktawave.commands.util import show_template_category, show_oci_logs, show_oci_settings, show_template_info
from oktawave.exceptions import OktawaveORDBInvalidTemplateError
class ORDBParam(NamedItemParam):
name = 'ORDB instance name/id'
label = 'ORDB'
@classmethod
def list_items(cls, api):
for item in api.ORDB_List():
yield item['id'], item['name']
def ordb_id_param(*args, **kwargs):
kwargs.setdefault('help', 'ORDB instance name or ID (as returned by ORDB List)')
kwargs.setdefault('type', ORDBParam())
return positional_option(*args, **kwargs)
def db_name_param(*args, **kwargs):
kwargs.setdefault('help', 'logical database name')
return positional_option(*args, **kwargs)
@click.group(cls=OktawaveCliGroup, name='ORDB')
def ORDB():
"""Manage database instances and logical databases"""
pass
@ORDB.command()
@pass_context
def ORDB_List(ctx):
"""List database instances"""
dbs = ctx.api.ORDB_List()
def fmt(db):
return [
db['id'],
db['name'],
db['type'],
db['size'],
db['available_space']
]
ctx.print_table(
['Virtual machine ID', 'Name', 'Type', 'Size', 'Available space'],
dbs, fmt)
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_TurnOn(ctx, ordb_id):
"""Turn a database instance on"""
ctx.api.ORDB_TurnOn(ordb_id)
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_TurnOff(ctx, ordb_id):
"""Turn a database instance off"""
ctx.api.ORDB_TurnOff(ordb_id)
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_Restart(ctx, ordb_id):
"""Restart a database instance"""
ctx.api.ORDB_Restart(ordb_id)
@ORDB.command(epilog="If db_name is not specified, delete whole instance")
@ordb_id_param('ordb_id')
@db_name_param('db_name', required=False)
@pass_context
def ORDB_Delete(ctx, ordb_id, db_name):
"""Delete a logical database or database instance"""
ctx.api.ORDB_Delete(ordb_id, db_name)
@ORDB.command()
@pass_context
def ORDB_Templates(ctx):
"""List database VM templates"""
print "\nCategory: MySQL"
show_template_category(ctx, OktawaveConstants['MYSQL_TEMPLATE_CATEGORY'], 'ORDB')
print "Category: PostgreSQL"
show_template_category(ctx, OktawaveConstants['POSTGRESQL_TEMPLATE_CATEGORY'], 'ORDB')
@ORDB.command(
epilog="""
Runtime: new root/administrator password will be generated, new host name
set etc. (Unmodified tech-support account required on OCI)
    AbsoluteCopy: initialization process will be skipped; only a new IP address
    and domain name will be assigned.
"""
)
@ordb_id_param('ordb_id')
@positional_option('name', help='new ORDB instance name')
@clone_type_param('clone_type')
@pass_context
def ORDB_Clone(ctx, ordb_id, name, clone_type):
"""Clone an ORDB instance"""
clone_type = getattr(CloneType, clone_type)
ctx.api.OCI_Clone(ordb_id, name, clone_type)
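# Hypothetical CLI invocation (command and argument names as defined above;
# the executable name is assumed):
#
#     oktawave-cli ORDB Clone my-db my-db-copy Runtime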
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_Logs(ctx, ordb_id):
"""Show ORDB virtual machine logs"""
show_oci_logs(ctx, ordb_id)
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_LogicalDatabases(ctx, ordb_id):
"""Shows logical databases"""
dbs = ctx.api.ORDB_LogicalDatabases(ordb_id)
def fmt(db):
return [
db['id'],
db['name'],
db['type'],
db['encoding'],
'Yes' if db['is_running'] else 'No',
db['QPS'],
db['Size']
]
ctx.print_table(
['Virtual machine ID', 'Name', 'Type', 'Encoding', 'Running', 'QPS', 'Size'],
dbs, fmt)
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_Settings(ctx, ordb_id):
"""Show ORDB settings"""
show_oci_settings(ctx, ordb_id)
@ORDB.command(name='Create', short_help='Create a new ORDB')
@positional_option('name', help='new ORDB instance name')
@template_id_param('template', help='template ID (as returned by ORDB Templates)')
@oci_class_param('oci_class', required=False)
@subregion_param('--subregion')
@pass_context
def ORDB_Create(ctx, name, template, oci_class, subregion):
"""Create a database VM"""
try:
ctx.api.ORDB_Create(name, template, oci_class=oci_class, subregion=subregion)
except OktawaveORDBInvalidTemplateError:
print "ERROR: Selected template is not a database template"
return 1
@ORDB.command()
@ordb_id_param('ordb_id')
@pass_context
def ORDB_GlobalSettings(ctx, ordb_id):
"""Show global database engine settings"""
settings = ctx.api.ORDB_GlobalSettings(ordb_id)
def fmt(item):
return [item['name'], item['value']]
ctx.print_table(['Name', 'Value'], settings, fmt)
@ORDB.command()
@template_id_param('template_id')
@pass_context
def ORDB_TemplateInfo(ctx, template_id):
"""Show more detailed info about a particular template"""
show_template_info(ctx, template_id)
@ORDB.command()
@ordb_id_param('ordb_id')
@db_name_param('name')
@positional_option('encoding', help='database character encoding',
type=click.Choice(['utf8', 'latin2']), default='utf8')
@pass_context
def ORDB_CreateLogicalDatabase(ctx, ordb_id, name, encoding):
"""Create a new logical database within an instance"""
ctx.api.ORDB_CreateLogicalDatabase(ordb_id, name, encoding)
print "OK"
@ORDB.command()
@ordb_id_param('ordb_id')
@db_name_param('name')
@pass_context
def ORDB_BackupLogicalDatabase(ctx, ordb_id, name):
"""Create a backup of logical database"""
ctx.api.ORDB_BackupLogicalDatabase(ordb_id, name)
print "OK"
@ORDB.command()
@ordb_id_param('id_from', help='source ORDB name or ID (as returned by ORDB List)')
@ordb_id_param('id_to', help='destination ORDB name or ID (as returned by ORDB List)')
@db_name_param('name')
@pass_context
def ORDB_MoveLogicalDatabase(ctx, id_from, id_to, name):
"""Move a backup of logical database between ORDB instances"""
ctx.api.ORDB_MoveLogicalDatabase(id_from, id_to, name)
print "OK"
@ORDB.command()
@pass_context
def ORDB_Backups(ctx):
"""List logical database backups"""
backups = ctx.api.ORDB_Backups()
def fmt(b):
return [b['file_name'], b['type'], b['path']]
ctx.print_table(
['File name', 'Database type', 'Full path'],
backups, fmt)
@ORDB.command()
@ordb_id_param('ordb_id')
@positional_option('backup_file', help='backup file name (as returned by ORDB Backups)')
@db_name_param('name')
@pass_context
def ORDB_RestoreLogicalDatabase(ctx, ordb_id, name, backup_file):
"""Restore a backup of logical database"""
ctx.api.ORDB_RestoreLogicalDatabase(ordb_id, name, backup_file)
print "OK"
| gnosek/oktawave-cli | oktawave/commands/ordb.py | Python | gpl-3.0 | 7,032 |
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: aci_firmware_policy
short_description: This creates a firmware policy
version_added: "2.8"
description:
- This module creates a firmware policy for firmware groups. The firmware policy is created first and then
- referenced by the firmware group. You will assign the firmware and specify whether to ignore the compatibility
- check.
options:
name:
description:
- Name of the firmware policy
required: true
version:
description:
            - The version of the firmware associated with this policy. This value is very important, as is constructing
            - it correctly. The syntax for this field is n9000-xx.x. If you look at the firmware repository using the UI,
            - each version will have a "Full Version" column; this is the value you need to use. So, if the Full Version
- is 13.1(1i), the value for this field would be n9000-13.1(1i)
required: true
ignoreCompat:
description:
- Check if compatibility checks should be ignored
required: false
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
default: present
choices: ['absent', 'present', 'query']
extends_documentation_fragment:
- ACI
author:
- Steven Gerhart (@sgerhart)
'''
EXAMPLES = '''
- name: firmware policy
aci_firmware_policy:
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
validate_certs: no
name: test2FrmPol
version: n9000-13.2(1m)
ignoreCompat: False
state: present
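# A hypothetical additional example (host/credential variables assumed, as above):
- name: query firmware policy
  aci_firmware_policy:
    host: "{{ inventory_hostname }}"
    username: "{{ user }}"
    password: "{{ pass }}"
    validate_certs: no
    name: test2FrmPol
    state: query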
'''
RETURN = '''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
name=dict(type='str', aliases=['name']), # Not required for querying all objects
version=dict(type='str', aliases=['version']),
        ignoreCompat=dict(type='bool'),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'absent', ['name']],
['state', 'present', ['name', 'version']],
],
)
state = module.params['state']
name = module.params['name']
version = module.params['version']
if module.params['ignoreCompat']:
ignore = 'yes'
else:
ignore = 'no'
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class='firmwareFwP',
aci_rn='fabric/fwpol-{0}'.format(name),
target_filter={'name': name},
module_object=name,
),
)
aci.get_existing()
if state == 'present':
aci.payload(
aci_class='firmwareFwP',
class_config=dict(
name=name,
version=version,
ignoreCompat=ignore,
),
)
aci.get_diff(aci_class='firmwareFwP')
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| alxgu/ansible | lib/ansible/modules/network/aci/aci_firmware_policy.py | Python | gpl-3.0 | 6,423 |
'''
Developed by Samuel Tanner Lindemer 2015
Le Laboratoire des Signaux et Systemes
Centrale-Supelec Universite, Gif-sur-Yvette, France
[email protected]
'''
import logging; logger = logging.getLogger('morse.' + __name__)
import morse.core.sensor
from morse.core import blenderapi
from morse.helpers.components import add_data
class KeySensor(morse.core.sensor.Sensor):
_name = 'KeySensor'
_short_desc = 'Provides a data stream of keyboard events from the Blender API.'
add_data('up', False, 'boolean', 'up arrow key sensor')
add_data('down', False, 'boolean', 'down arrow key sensor')
add_data('left', False, 'boolean', 'left arrow key sensor')
add_data('right', False, 'boolean', 'right arrow key sensor')
add_data('i', False, 'boolean', 'i key sensor')
add_data('j', False, 'boolean', 'j key sensor')
add_data('k', False, 'boolean', 'k key sensor')
add_data('l', False, 'boolean', 'l key sensor')
def __init__(self, obj, parent=None):
logger.info("%s initialization" % obj.name)
morse.core.sensor.Sensor.__init__(self, obj, parent)
logger.info('Component initialized')
def default_action(self):
keyboard = blenderapi.keyboard()
is_actived = blenderapi.input_active()
if keyboard.events[blenderapi.UPARROWKEY] == is_actived:
self.local_data['up'] = True
else: self.local_data['up'] = False
if keyboard.events[blenderapi.DOWNARROWKEY] == is_actived:
self.local_data['down'] = True
else: self.local_data['down'] = False
if keyboard.events[blenderapi.LEFTARROWKEY] == is_actived:
self.local_data['left'] = True
else: self.local_data['left'] = False
if keyboard.events[blenderapi.RIGHTARROWKEY] == is_actived:
self.local_data['right'] = True
else: self.local_data['right'] = False
if keyboard.events[blenderapi.IKEY] == is_actived:
self.local_data['i'] = True
else: self.local_data['i'] = False
if keyboard.events[blenderapi.JKEY] == is_actived:
self.local_data['j'] = True
else: self.local_data['j'] = False
if keyboard.events[blenderapi.KKEY] == is_actived:
self.local_data['k'] = True
else: self.local_data['k'] = False
if keyboard.events[blenderapi.LKEY] == is_actived:
self.local_data['l'] = True
else: self.local_data['l'] = False
| motet/baldr | src/baldr/sensors/keysensor.py | Python | gpl-2.0 | 2,259 |
#!/usr/bin/python
"""
The purpose of this tron bot is simply to run with a minimal amount of
intelligence behind each move.
The algorithm is based on the wallbot but rather than looking to move
along the wall, it looks to move - if possible - to a location with
one open adjacent move.
The following bot is based on the wallbot from the tron.aichallenge website.
(The original was intended to look farther ahead, but I got caught up
in the details.)
"""
import tron, random
ORDER = list(tron.DIRECTIONS)
random.shuffle(ORDER)
def which_move(board):
for i in ORDER:
decision = board.moves()[0]
#looking at area directly surrounding
landing = board.rel(i)
#finding legal moves within surrounding area
if not board.passable(landing):
continue
#defining the squares adjacent to chosen legal move (looking 2 steps ahead)
adj = board.adjacent(landing)
if any(board[place] == tron.FLOOR for place in adj):
decision = i
break
#for place in adj:
#defining the squares 3 steps ahead
#adj2 = board.adjacent(place)
#if 2 and 3 steps ahead are open, move to the place that allows for that
#if any (board[place] == tron.FLOOR and
# ( board[p2] == tron.FLOOR for p2 in adj2 )):
return decision
for board in tron.Board.generate():
tron.move(which_move(board))
| CorySpitzer/tron-engine | bots/annabot.py | Python | bsd-2-clause | 1,769 |
# Copyright 2015-2016 Florian Bruhin (The Compiler) <[email protected]>
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.checkpyver."""
import re
import sys
import subprocess
import unittest.mock
import pytest
from qutebrowser.misc import checkpyver
TEXT = (r"At least Python 3.4 is required to run qutebrowser, but "
r"\d+\.\d+\.\d+ is installed!\n")
@pytest.mark.not_frozen
def test_python2():
"""Run checkpyver with python 2."""
try:
proc = subprocess.Popen(['python2', checkpyver.__file__,
'--no-err-windows'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
except FileNotFoundError:
pytest.skip("python2 not found")
assert not stdout
stderr = stderr.decode('utf-8')
assert re.match(TEXT, stderr), stderr
assert proc.returncode == 1
def test_normal(capfd):
checkpyver.check_python_version()
out, err = capfd.readouterr()
assert not out
assert not err
def test_patched_no_errwindow(capfd, monkeypatch):
"""Test with a patched sys.hexversion and --no-err-windows."""
monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.argv',
[sys.argv[0], '--no-err-windows'])
monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.hexversion',
0x03000000)
monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.exit',
lambda status: None)
checkpyver.check_python_version()
stdout, stderr = capfd.readouterr()
assert not stdout
assert re.match(TEXT, stderr), stderr
def test_patched_errwindow(capfd, mocker, monkeypatch):
"""Test with a patched sys.hexversion and a fake Tk."""
monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.hexversion',
0x03000000)
monkeypatch.setattr('qutebrowser.misc.checkpyver.sys.exit',
lambda status: None)
try:
import tkinter
except ImportError:
tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk',
spec=['withdraw'], new_callable=mocker.Mock)
msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox',
spec=['showerror'])
else:
tk_mock = mocker.patch('qutebrowser.misc.checkpyver.Tk', autospec=True)
msgbox_mock = mocker.patch('qutebrowser.misc.checkpyver.messagebox',
autospec=True)
checkpyver.check_python_version()
stdout, stderr = capfd.readouterr()
assert not stdout
assert not stderr
tk_mock.assert_called_with()
tk_mock().withdraw.assert_called_with()
msgbox_mock.showerror.assert_called_with("qutebrowser: Fatal error!",
unittest.mock.ANY)
| haxwithaxe/qutebrowser | tests/unit/misc/test_checkpyver.py | Python | gpl-3.0 | 3,598 |
# vim: expandtab ts=4 sw=4 sts=4 fileencoding=utf-8:
#
# Copyright (C) 2007-2011 GNS3 Development Team (http://www.gns3.net/team).
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# http://www.gns3.net/contact
#
#debuglevel: 0=disabled, 1=default, 2=debug, 3=deep debug
debuglevel = 0
def debugmsg(level, message):
if debuglevel == 0:
return
if debuglevel >= level:
print message
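# Example (levels as documented above): a call such as
# debugmsg(2, "NETFile::some_method()") prints only when debuglevel is 2 or higher.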
import os, re, random, base64, traceback, time, glob
import GNS3.Globals as globals
import GNS3.Dynagen.dynagen as dynagen_namespace
import GNS3.Dynagen.dynamips_lib as lib
import GNS3.Dynagen.qemu_lib as qlib
import GNS3.Dynagen.dynagen_vbox_lib as vboxlib
from GNS3.Globals.Symbols import SYMBOLS
from GNS3.Utils import translate, debug, error, nvram_export
from PyQt4 import QtGui, QtCore, QtSvg
from GNS3.Annotation import Annotation
from GNS3.Pixmap import Pixmap
from GNS3.ShapeItem import AbstractShapeItem
from GNS3.ShapeItem import Rectangle
from GNS3.ShapeItem import Ellipse
from GNS3.Config.Objects import iosImageConf, hypervisorConf
from GNS3.Node.AbstractNode import AbstractNode
from GNS3.Node.DecorativeNode import DecorativeNode, init_decoration_id
from GNS3.Node.IOSRouter import IOSRouter, init_router_id
from GNS3.Node.ATMSW import init_atmsw_id
from GNS3.Node.ATMBR import init_atmbr_id
from GNS3.Node.ETHSW import ETHSW, init_ethsw_id
from GNS3.Node.Hub import Hub, init_hub_id
from GNS3.Node.FRSW import init_frsw_id
from GNS3.Node.Cloud import Cloud, init_cloud_id
from GNS3.Node.AnyEmuDevice import init_emu_id, AnyEmuDevice
from GNS3.Node.AnyVBoxEmuDevice import init_vbox_emu_id, AnyVBoxEmuDevice
from __main__ import GNS3_RUN_PATH, VERSION
router_hostname_re = re.compile(r"""^R([0-9]+)""")
ethsw_hostname_re = re.compile(r"""^SW([0-9]+)""")
hub_hostname_re = re.compile(r"""^HUB([0-9]+)""")
frsw_hostname_re = re.compile(r"""^FR([0-9]+)""")
atmsw_hostname_re = re.compile(r"""^ATM([0-9]+)""")
atmbr_hostname_re = re.compile(r"""^BR([0-9]+)""")
cloud_hostname_re = re.compile(r"""^C([0-9]+)""")
emu_hostname_re = re.compile(r"""^(?:PIX|JUNOS|ASA|AWP|IDS|QEMU)([0-9]+)""")
vbox_emu_hostname_re = re.compile(r"""^(?:VBOX)([0-9]+)""")
decorative_hostname_re = re.compile(r"""^N([0-9]+)""")
class NETFile(object):
""" NETFile implementing the .net file import/export
"""
def __init__(self):
debugmsg(2, "NETFile::__init__()")
self.dynagen = globals.GApp.dynagen
self.connection2cloud = {}
self.decorative_node_connections = {}
def add_in_connection_list(self, connection_data, connection_list):
""" Record the connection in connection_list
"""
debugmsg(2, "NETFile::add_in_connection_list()")
(source_device, source_interface, destination_device, destination_interface) = connection_data
        # don't want to record bidirectional connections
for connection in connection_list:
(list_source_device, list_source_interface, list_destination_device, list_destination_interface) = connection
if source_device == list_destination_device and source_interface == list_destination_interface:
return
connection_list.append(connection_data)
def populate_connection_list_for_router(self, device, connection_list):
""" Add router connections in connection_list
"""
debugmsg(2, "NETFile::populate_connection_list_for_router()")
for adapter in device.slot:
if adapter:
for interface in adapter.interfaces:
for dynagenport in adapter.interfaces[interface]:
i = adapter.interfaces[interface][dynagenport]
nio = adapter.nio(i)
#if it is a UDP NIO, find the reverse NIO and create output based on what type of device is on the other end
if nio != None:
if adapter.router.model_string in ['1710', '1720', '1721', '1750']:
source_interface = interface.lower() + str(dynagenport)
else:
source_interface = interface.lower() + str(adapter.slot) + "/" + str(dynagenport)
nio_str = nio.config_info()
if nio_str.lower()[:3] == 'nio':
connection_list.append((device.name, source_interface, "nio", nio_str))
else:
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio)
if isinstance(remote_device, lib.Router):
(rem_int_name, rem_dynagen_port) = remote_adapter.interfaces_mips2dyn[remote_port]
if remote_device.model_string in ['1710', '1720', '1721', '1750']:
self.add_in_connection_list((device.name, source_interface, remote_device.name, rem_int_name + str(rem_dynagen_port)), connection_list)
else:
self.add_in_connection_list((device.name, source_interface, remote_device.name, rem_int_name + str(remote_adapter.slot) + "/" + str(rem_dynagen_port)),
connection_list)
elif isinstance(remote_device, lib.FRSW) or isinstance(remote_device, lib.ATMSW) or isinstance(remote_device, lib.ETHSW) or isinstance(remote_device, lib.Hub) or isinstance(remote_device, lib.ATMBR):
connection_list.append((device.name, source_interface, remote_device.name, str(remote_port)))
elif isinstance(remote_device, qlib.AnyEmuDevice) or isinstance(remote_device, vboxlib.AnyVBoxEmuDevice):
connection_list.append((device.name, source_interface, remote_device.name, remote_adapter + str(remote_port)))
def populate_connection_list_for_emulated_device(self, device, connection_list):
""" Add emulated device connections in connection_list
"""
debugmsg(2, "NETFile::populate_connection_list_for_emulated_device()")
for port in device.nios:
if device.nios[port] != None:
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(device.nios[port])
if isinstance(remote_device, qlib.AnyEmuDevice):
self.add_in_connection_list((device.name, 'e' + str(port), remote_device.name, remote_adapter + str(remote_port)), connection_list)
elif isinstance(remote_device, vboxlib.AnyVBoxEmuDevice):
self.add_in_connection_list((device.name, 'e' + str(port), remote_device.name, remote_adapter + str(remote_port)), connection_list)
elif isinstance(remote_device, lib.ETHSW):
connection_list.append((device.name, 'e' + str(port), remote_device.name, str(remote_port)))
elif isinstance(remote_device, lib.Hub):
connection_list.append((device.name, 'e' + str(port), remote_device.name, str(remote_port)))
def populate_connection_list_for_emulated_switch(self, device, connection_list):
""" Add emulated switch connections in connection_list
"""
debugmsg(2, "NETFile::populate_connection_list_for_emulated_switch()")
if isinstance(device, lib.ETHSW):
keys = device.mapping.keys()
for port in keys:
nio_port = device.nio(port)
# Only NIO_udp
if nio_port and isinstance(nio_port, lib.NIO_udp):
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio_port)
if isinstance(remote_device, lib.ETHSW):
self.add_in_connection_list((device.name, str(port), remote_device.name, str(remote_port)), connection_list)
if isinstance(device, lib.FRSW):
keys = device.pvcs.keys()
for (port, dlci) in keys:
nio_port = device.nio(port)
if nio_port:
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio_port)
if isinstance(remote_device, lib.FRSW):
self.add_in_connection_list((device.name, str(port), remote_device.name, str(remote_port)), connection_list)
if isinstance(device, lib.ATMSW):
keys = device.vpivci_map.keys()
for key in keys:
port = key[0]
nio_port = device.nio(port)
if nio_port:
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio_port)
if isinstance(remote_device, lib.ATMSW) or isinstance(remote_device, lib.ATMBR):
self.add_in_connection_list((device.name, str(port), remote_device.name, str(remote_port)), connection_list)
if isinstance(device, lib.ATMBR):
keys = device.mapping.keys()
for port in keys:
nio_port = device.nio(port)
if nio_port:
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio_port)
if isinstance(remote_device, lib.ATMSW) or isinstance(remote_device, lib.ATMBR):
self.add_in_connection_list((device.name, str(port), remote_device.name, str(remote_port)), connection_list)
if isinstance(device, lib.Hub):
keys = device.nios.keys()
for port in keys:
nio_port = device.nio(port)
# Only NIO_udp
if nio_port and isinstance(nio_port, lib.NIO_udp):
(remote_device, remote_adapter, remote_port) = lib.get_reverse_udp_nio(nio_port)
if isinstance(remote_device, lib.Hub) or isinstance(remote_device, lib.ETHSW):
self.add_in_connection_list((device.name, str(port), remote_device.name, str(remote_port)), connection_list)
def create_node(self, device, default_symbol_name, running_config_name):
""" Create a new node
"""
debugmsg(2, "**** NETFile::create_node(%s, %s, %s)" % (unicode(device), unicode(default_symbol_name), unicode(running_config_name)))
symbol_name = x = y = z = hx = hy = None
config = None
if isinstance(device, qlib.AnyEmuDevice) and self.dynagen.globalconfig['qemu ' + device.dynamips.host + ':' + str(device.dynamips.port)].has_key(running_config_name):
config = self.dynagen.globalconfig['qemu ' + device.dynamips.host + ':' + str(device.dynamips.port)][running_config_name]
elif isinstance(device, vboxlib.AnyVBoxEmuDevice) and self.dynagen.globalconfig['vbox ' + device.dynamips.host + ':' + str(device.dynamips.port)].has_key(running_config_name):
config = self.dynagen.globalconfig['vbox ' + device.dynamips.host + ':' + str(device.dynamips.port)][running_config_name]
elif self.dynagen.globalconfig.has_key(device.dynamips.host + ':' + str(device.dynamips.port)) and \
self.dynagen.globalconfig[device.dynamips.host +':' + str(device.dynamips.port)].has_key(running_config_name):
config = self.dynagen.globalconfig[device.dynamips.host + ':' + str(device.dynamips.port)][running_config_name]
elif self.dynagen.globalconfig.has_key(device.dynamips.host) and self.dynagen.globalconfig[device.dynamips.host].has_key(running_config_name):
config = self.dynagen.globalconfig[device.dynamips.host][running_config_name]
#print "config = %s" % str(config)
if config:
if config.has_key('x'):
x = config['x']
if config.has_key('y'):
y = config['y']
if config.has_key('z'):
z = config['z']
if config.has_key('hx'):
hx = config['hx']
if config.has_key('hy'):
hy = config['hy']
if config.has_key('symbol'):
symbol_name = config['symbol']
#print "symbol_name = %s" % str(symbol_name)
node = None
if symbol_name:
for item in SYMBOLS:
if item['name'] == default_symbol_name:
symbol_resources = QtCore.QResource(":/symbols")
for symbol in symbol_resources.children():
symbol = str(symbol)
if symbol.startswith(symbol_name):
normal_renderer = QtSvg.QSvgRenderer(':/symbols/' + symbol_name + '.normal.svg')
select_renderer = QtSvg.QSvgRenderer(':/symbols/' + symbol_name + '.selected.svg')
node = item['object'](normal_renderer, select_renderer)
node.type = symbol_name
node.default_symbol = False
break
break
debugmsg(3, "NETFile.py, node = %s" % unicode(node))
if not node:
# symbol name not found, use default one
default_symbol = False
if not symbol_name or not globals.GApp.scene.renders.has_key(symbol_name):
symbol_name = default_symbol_name
default_symbol = True
debugmsg(3, "NETFile.py, symbol_name = %s" % unicode(symbol_name))
for item in SYMBOLS:
if item['name'] == symbol_name:
renders = globals.GApp.scene.renders[symbol_name]
node = item['object'](renders['normal'], renders['selected'])
node.type = item['name']
if not default_symbol:
node.default_symbol = False
break
debugmsg(3, "NETFile.py, node = %s" % unicode(node))
if not node:
return None
node.set_hostname(device.name)
if x == None:
x = random.uniform(-200, 200)
if y == None:
y = random.uniform(-200, 200)
node.setPos(float(x), float(y))
if z:
node.setZValue(float(z))
if hx and hy:
node.hostname_xpos = float(hx)
node.hostname_ypos = float(hy)
if globals.GApp.workspace.flg_showHostname == True:
node.showHostname()
debug("Node created: " + str(node))
return node
def record_image(self, device):
""" Record an image and all its settings in GNS3
"""
debugmsg(2, "NETFile::record_image()")
conf_image = iosImageConf()
conf_image.id = globals.GApp.iosimages_ids
globals.GApp.iosimages_ids += 1
conf_image.filename = unicode(device.image)
# dynamips lib doesn't return c3700, force platform
if device.model == 'c3725' or device.model == 'c3745':
conf_image.platform = 'c3700'
else:
conf_image.platform = str(device.model)
conf_image.chassis = str(device.model_string)
if device.idlepc:
conf_image.idlepc = str(device.idlepc)
conf_image.default_ram = device.ram
conf_image.hypervisor_port = device.dynamips.port
conf_image.default = False
if device.dynamips.host == globals.GApp.systconf['dynamips'].HypervisorManager_binding and globals.GApp.systconf['dynamips'].import_use_HypervisorManager:
conf_image.hypervisors = []
globals.GApp.iosimages[globals.GApp.systconf['dynamips'].HypervisorManager_binding + ':' + device.image] = conf_image
else:
# this is an external hypervisor
host = unicode(device.dynamips.host)
conf_image.hypervisors = [host + ':' + str(device.dynamips.port)]
conf_hypervisor = hypervisorConf()
conf_hypervisor.id = globals.GApp.hypervisors_ids
globals.GApp.hypervisors_ids += 1
conf_hypervisor.host = host
conf_hypervisor.port = device.dynamips.port
conf_hypervisor.workdir = unicode(device.dynamips.workingdir)
conf_hypervisor.baseUDP = device.dynamips.udp
conf_hypervisor.baseConsole = device.dynamips.baseconsole
conf_hypervisor.baseAUX = device.dynamips.baseaux
globals.GApp.hypervisors[conf_hypervisor.host + ':' + str(conf_hypervisor.port)] = conf_hypervisor
globals.GApp.iosimages[host + ':' + device.image] = conf_image
def configure_node(self, node, device):
""" Configure a node
"""
debugmsg(2, "NETFile::configure_node()")
if isinstance(device, lib.Router):
if (device.dynamips.host == globals.GApp.systconf['dynamips'].HypervisorManager_binding or device.dynamips.host == 'localhost') and \
globals.GApp.HypervisorManager and globals.GApp.systconf['dynamips'].import_use_HypervisorManager:
hypervisor = globals.GApp.HypervisorManager.getHypervisor(device.dynamips.port)
hypervisor['load'] += node.default_ram
node.set_hypervisor(device.dynamips)
if not globals.GApp.iosimages.has_key(device.dynamips.host + ':' + device.image):
self.record_image(device)
image_conf = globals.GApp.iosimages[device.dynamips.host + ':' + device.image]
globals.GApp.topology.preConfigureNode(node, image_conf)
QtCore.QObject.connect(node, QtCore.SIGNAL("Add link"), globals.GApp.scene.slotAddLink)
QtCore.QObject.connect(node, QtCore.SIGNAL("Delete link"), globals.GApp.scene.slotDeleteLink)
globals.GApp.topology.nodes[node.id] = node
node.set_dynagen_device(device)
device.dynamips.configchange = True
return True
def add_connection(self, connection):
""" Add a connection
"""
debugmsg(2, "NETFile::add_connection()")
debug('Add connection ' + str(connection))
(source_name, source_interface, destination_name, destination_interface) = connection
srcid = globals.GApp.topology.getNodeID(source_name)
src_node = globals.GApp.topology.getNode(srcid)
if destination_name == 'nio':
cloud = self.create_cloud(destination_interface, source_name, source_interface)
dstid = cloud.id
dst_node = cloud
else:
dstid = globals.GApp.topology.getNodeID(destination_name)
dst_node = globals.GApp.topology.getNode(dstid)
globals.GApp.topology.recordLink(srcid, source_interface, dstid, destination_interface, src_node, dst_node)
if not isinstance(src_node, IOSRouter) and not isinstance(src_node, AnyEmuDevice) and not isinstance(src_node, AnyVBoxEmuDevice):
if not isinstance(src_node, Cloud) and not src_node.hypervisor:
src_node.get_dynagen_device()
src_node.startupInterfaces()
src_node.state = 'running'
if not isinstance(dst_node, IOSRouter) and not isinstance(dst_node, AnyEmuDevice) and not isinstance(dst_node, AnyVBoxEmuDevice):
if not isinstance(dst_node, Cloud) and not dst_node.hypervisor:
dst_node.get_dynagen_device()
dst_node.startupInterfaces()
dst_node.state = 'running'
def create_cloud(self, nio, source_device, source_interface):
""" Create a cloud (used for NIO connections)
"""
debugmsg(2, "NETFile::create_cloud()")
nio = nio.lower()
if self.connection2cloud.has_key((source_device, source_interface, nio)):
return (self.connection2cloud[(source_device, source_interface, nio)])
renders = globals.GApp.scene.renders['Cloud']
cloud = Cloud(renders['normal'], renders['selected'])
x = random.uniform(-200, 200)
y = random.uniform(-200, 200)
cloud.setPos(x, y)
config = {}
config['nios'] = [nio]
config['rpcap_mapping'] = dict(self.dynagen.getRpcapMapping())
cloud.set_config(config)
QtCore.QObject.connect(cloud, QtCore.SIGNAL("Add link"), globals.GApp.scene.slotAddLink)
QtCore.QObject.connect(cloud, QtCore.SIGNAL("Delete link"), globals.GApp.scene.slotDeleteLink)
globals.GApp.topology.nodes[cloud.id] = cloud
if globals.GApp.workspace.flg_showHostname == True:
cloud.showHostname()
globals.GApp.topology.addItem(cloud)
return cloud
def apply_gns3_data(self):
""" Apply specific GNS3 data
"""
debugmsg(2, "NETFile::apply_gns3_data()")
max_cloud_id = -1
max_decorative_id = -1
gns3data = self.dynagen.getGNS3Data()
if gns3data:
if gns3data.has_key('width') and gns3data.has_key('height'):
width = int(gns3data['width'])
height = int(gns3data['height'])
globals.GApp.topology.setSceneRect(-(width / 2), -(height / 2), width, height)
if gns3data.has_key('m11') and gns3data.has_key('m22'):
globals.GApp.scene.setMatrix(QtGui.QMatrix(float(gns3data['m11']), 0.0, 0.0, float(gns3data['m22']), 0.0, 0.0))
for section in gns3data:
try:
(devtype, hostname) = section.split(' ')
except ValueError:
continue
if devtype.lower() == 'cloud':
default_symbol = True
symbol_name = None
if gns3data[section].has_key('symbol') and gns3data[section]['symbol']:
symbol_name = gns3data[section]['symbol']
symbol_resources = QtCore.QResource(":/symbols")
for symbol in symbol_resources.children():
symbol = str(symbol)
if symbol.startswith(symbol_name):
normal_renderer = QtSvg.QSvgRenderer(':/symbols/' + symbol_name + '.normal.svg')
select_renderer = QtSvg.QSvgRenderer(':/symbols/' + symbol_name + '.selected.svg')
default_symbol = False
break
if default_symbol:
if not symbol_name or not globals.GApp.scene.renders.has_key(symbol_name):
symbol_name = 'Cloud'
else:
default_symbol = False
normal_renderer = globals.GApp.scene.renders[symbol_name]['normal']
select_renderer = globals.GApp.scene.renders[symbol_name]['selected']
cloud = Cloud(normal_renderer, select_renderer)
config = {}
config['nios'] = []
config['rpcap_mapping'] = dict(self.dynagen.getRpcapMapping())
cloud.type = symbol_name
if not default_symbol:
cloud.default_symbol = False
cloud.hostname = unicode(hostname)
if gns3data[section].has_key('x') and gns3data[section].has_key('y') \
and gns3data[section]['x'] != None and gns3data[section]['y'] != None:
cloud.setPos(float(gns3data[section]['x']), float(gns3data[section]['y']))
if gns3data[section].has_key('z'):
cloud.setZValue(float(gns3data[section]['z']))
if gns3data[section].has_key('hx') and gns3data[section].has_key('hy') \
and gns3data[section]['hx'] != None and gns3data[section]['hy'] != None:
cloud.hostname_xpos = float(gns3data[section]['hx'])
cloud.hostname_ypos = float(gns3data[section]['hy'])
if gns3data[section].has_key('connections'):
connections = gns3data[section]['connections'].split(' ')
for connection in connections:
(device, interface, nio) = connection.split(':', 2)
self.connection2cloud[(device, interface, nio.lower())] = cloud
config['nios'].append(nio)
cloud.set_config(config)
QtCore.QObject.connect(cloud, QtCore.SIGNAL("Add link"), globals.GApp.scene.slotAddLink)
QtCore.QObject.connect(cloud, QtCore.SIGNAL("Delete link"), globals.GApp.scene.slotDeleteLink)
globals.GApp.topology.nodes[cloud.id] = cloud
if globals.GApp.workspace.flg_showHostname == True:
cloud.showHostname()
globals.GApp.topology.addItem(cloud)
match_obj = cloud_hostname_re.match(cloud.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_cloud_id:
max_cloud_id = id
if devtype.lower() == 'note':
note_object = Annotation()
text = gns3data[section]['text'].replace("\\n", "\n")
# remove protective quote if present
if len(text) > 1 and text[0] == '"' and text[-1] == '"':
text = text[1:-1]
note_object.setPlainText(text)
note_object.setPos(float(gns3data[section]['x']), float(gns3data[section]['y']))
if gns3data[section].has_key('z'):
note_object.setZValue(float(gns3data[section]['z']))
if note_object.zValue() < 0:
# object on background layer, user cannot select it and move it.
note_object.setFlag(note_object.ItemIsSelectable, False)
note_object.setFlag(note_object.ItemIsMovable, False)
if gns3data[section].has_key('font'):
font = QtGui.QFont()
if font.fromString(gns3data[section]['font'][1:-1]):
note_object.setFont(font)
else:
print translate("NETFile", "Cannot load font: %s") % gns3data[section]['font']
if gns3data[section].has_key('rotate'):
note_object.rotation = int(gns3data[section]['rotate'])
note_object.rotate(note_object.rotation)
if gns3data[section].has_key('color'):
note_object.setDefaultTextColor(QtGui.QColor(gns3data[section]['color'][1:-1]))
# this is an interface label, save it in a dict to be used later ...
if gns3data[section].has_key('interface'):
globals.interfaceLabels[gns3data[section]['interface']] = note_object
else:
globals.GApp.topology.addItem(note_object)
if devtype.lower() == 'shape':
if gns3data[section]['type'] == 'rectangle':
size = QtCore.QSizeF(float(gns3data[section]['width']), float(gns3data[section]['height']))
pos = QtCore.QPointF(float(gns3data[section]['x']), float(gns3data[section]['y']))
shape_object = Rectangle(pos, size)
else:
size = QtCore.QSizeF(float(gns3data[section]['width']), float(gns3data[section]['height']))
pos = QtCore.QPointF(float(gns3data[section]['x']), float(gns3data[section]['y']))
shape_object = Ellipse(pos, size)
if gns3data[section].has_key('z'):
shape_object.setZValue(float(gns3data[section]['z']))
if shape_object.zValue() < 0:
                    # object on background layer, user cannot select or move it.
shape_object.setFlag(shape_object.ItemIsSelectable, False)
shape_object.setFlag(shape_object.ItemIsMovable, False)
if gns3data[section].has_key('rotate'):
shape_object.rotation = int(gns3data[section]['rotate'])
shape_object.rotate(shape_object.rotation)
if gns3data[section].has_key('fill_color'):
brush = QtGui.QBrush(QtGui.QColor(gns3data[section]['fill_color'][1:-1]))
shape_object.setBrush(brush)
pen = QtGui.QPen(QtCore.Qt.black, 2, QtCore.Qt.SolidLine, QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin)
if gns3data[section].has_key('border_color'):
pen.setColor(QtGui.QColor(gns3data[section]['border_color'][1:-1]))
if gns3data[section].has_key('border_width'):
pen.setWidth(int(gns3data[section]['border_width']))
if gns3data[section].has_key('border_style'):
pen.setStyle(QtCore.Qt.PenStyle(int(gns3data[section]['border_style'])))
shape_object.setPen(pen)
globals.GApp.topology.addItem(shape_object)
if devtype.lower() == 'pixmap':
pixmap_path = unicode(gns3data[section]['path'])
# Check if this is a relative pixmap path and convert to an absolute path if necessary
abspath = os.path.join(os.path.dirname(self.dynagen.filename), pixmap_path)
if os.path.exists(abspath):
pixmap_path = abspath
debug(unicode("Converting relative pixmap path to absolute path: %s") % pixmap_path)
pixmap_image = QtGui.QPixmap(pixmap_path)
if not pixmap_image.isNull():
pixmap_object = Pixmap(pixmap_image, pixmap_path)
else:
print translate("NETFile", "Cannot load image: %s") % pixmap_path
continue
pixmap_object.setPos(float(gns3data[section]['x']), float(gns3data[section]['y']))
if gns3data[section].has_key('z'):
pixmap_object.setZValue(float(gns3data[section]['z']))
if pixmap_object.zValue() < 0:
                    # object on background layer, user cannot select or move it.
pixmap_object.setFlag(pixmap_object.ItemIsSelectable, False)
pixmap_object.setFlag(pixmap_object.ItemIsMovable, False)
globals.GApp.topology.addItem(pixmap_object)
if devtype.lower() == 'node':
hostname = unicode(hostname)
symbol = unicode(gns3data[section]['symbol'])
if not globals.GApp.scene.renders.has_key(symbol):
print translate("NETFile", "%s: cannot find %s symbol, please check this symbol is in your node list and reload the .net file") % (hostname, symbol)
continue
renders = globals.GApp.scene.renders[symbol]
decorative_node = DecorativeNode(renders['normal'], renders['selected'])
decorative_node.set_hostname(hostname)
decorative_node.create_config()
decorative_node.setPos(float(gns3data[section]['x']), float(gns3data[section]['y']))
if gns3data[section].has_key('z'):
decorative_node.setZValue(float(gns3data[section]['z']))
decorative_node.type = symbol
QtCore.QObject.connect(decorative_node, QtCore.SIGNAL("Add link"), globals.GApp.scene.slotAddLink)
QtCore.QObject.connect(decorative_node, QtCore.SIGNAL("Delete link"), globals.GApp.scene.slotDeleteLink)
globals.GApp.topology.nodes[decorative_node.id] = decorative_node
if globals.GApp.workspace.flg_showHostname == True:
decorative_node.showHostname()
globals.GApp.topology.addItem(decorative_node)
match_obj = decorative_hostname_re.match(decorative_node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_decorative_id:
max_decorative_id = id
if gns3data[section].has_key('connections'):
connections = gns3data[section]['connections'].split(' ')
config = decorative_node.get_config()
for connection in connections:
(device, remote_interface, local_interface) = connection.split(':', 2)
if not self.decorative_node_connections.has_key((hostname, local_interface, remote_interface)):
self.decorative_node_connections[(device, remote_interface, local_interface)] = decorative_node.id
if local_interface not in config['interfaces']:
config['interfaces'].append(local_interface)
        # update next IDs for clouds and decorative nodes
if max_cloud_id != -1:
init_cloud_id(max_cloud_id + 1)
if max_decorative_id != -1:
init_decoration_id(max_decorative_id + 1)
if len(globals.interfaceLabels):
globals.GApp.workspace.flg_showOnlySavedInterfaceNames = True
def apply_decorative_node_connections(self):
""" Create GUI connections for decorative nodes
"""
debugmsg(2, "NETFile::apply_decorative_node_connections()")
for (connection, local_device) in self.decorative_node_connections.iteritems():
(remote_device, remote_interface, local_interface) = connection
if isinstance(remote_device, IOSRouter):
remote_device.smart_interface(remote_interface[0])
srcid = local_device
dstid = globals.GApp.topology.getNodeID(remote_device)
globals.GApp.topology.addLink(srcid, local_interface, dstid, remote_interface)
def import_net_file(self, path):
""" Import a .net file
"""
debugmsg(2, "NETFile::import_net_file(%s)" % unicode(path))
if globals.GApp.systconf['dynamips'].import_use_HypervisorManager and globals.GApp.systconf['dynamips'].path == '':
QtGui.QMessageBox.warning(globals.GApp.mainWindow, translate("NETFile", "Save"), translate("NETFile", "Please configure the path to Dynamips"))
return
globals.GApp.workspace.clear()
dynagen_namespace.CONFIGSPECPATH = []
dir = os.path.dirname(dynagen_namespace.__file__)
debugmsg(3, "NETFile::import_net_file(), os.path.dirname(dynagen_namespace.__file__) = %s" % unicode(dir))
dynagen_namespace.CONFIGSPECPATH.append(dir)
try:
debugmsg(3, "NETFile.py: import_config, try: path = %s" % unicode(path))
dynagen_namespace.FILENAME = path
debugmsg(3, "NETFile.py: import_config, try: dynagen.import_config")
self.dynagen.import_config(path)
debugmsg(3, "NETFile.py: import_config, try: QtGui.QSplashScreen()")
splash = QtGui.QSplashScreen(QtGui.QPixmap(":images/logo_gns3_splash.png"))
splash.show()
splash.showMessage(translate("NETFile", "Please wait while importing the topology"))
debugmsg(3, "NETFile.py: import_config, try: GApp.processEvents")
globals.GApp.processEvents(QtCore.QEventLoop.AllEvents | QtCore.QEventLoop.WaitForMoreEvents, 1000)
debugmsg(3, "NETFile.py: import_config, dynagen.ghosting()")
self.dynagen.ghosting()
if globals.GApp.systconf['dynamips'].jitsharing:
self.dynagen.jitsharing()
except lib.DynamipsError, msg:
QtGui.QMessageBox.critical(globals.GApp.mainWindow, translate("NETFile", "Dynamips error"), unicode(msg))
globals.GApp.workspace.projectFile = None
globals.GApp.workspace.setWindowTitle("GNS3")
globals.GApp.workspace.clear()
return
except lib.DynamipsWarning, msg:
QtGui.QMessageBox.warning(globals.GApp.mainWindow, translate("NETFile", "Dynamips warning"), unicode(msg))
globals.GApp.workspace.projectFile = None
globals.GApp.workspace.setWindowTitle("GNS3")
globals.GApp.workspace.clear()
return
except Exception, ex:
curdate = time.strftime("%d %b %Y %H:%M:%S")
logfile = open('import_exception.log','a')
logfile.write("=== GNS3 " + VERSION + " traceback on " + curdate + " ===")
traceback.print_exc(file=logfile)
logfile.close()
traceback.print_exc()
exception_file = GNS3_RUN_PATH + os.sep + 'import_exception.log'
QtGui.QMessageBox.critical(globals.GApp.mainWindow, translate("NETFile", "Importation"), translate("NETFile", "Topology importation has failed! Exception detected, details saved in %s") % exception_file)
globals.GApp.workspace.projectFile = None
globals.GApp.workspace.setWindowTitle("GNS3")
globals.GApp.workspace.clear()
return
self.dynagen.apply_idlepc()
self.dynagen.get_defaults_config()
self.dynagen.update_running_config()
debug("Running config before importing: " + str(self.dynagen.running_config))
self.apply_gns3_data()
connection_list = []
config_dir = None
max_router_id = -1
max_ethsw_id = -1
max_hub_id = -1
max_frsw_id = -1
max_atmsw_id = -1
max_atmbr_id = -1
max_emu_id = -1
max_vbox_emu_id = -1
for (devicename, device) in self.dynagen.devices.iteritems():
if isinstance(device, lib.Bridge):
translate("NETFile", "Warning: GNS3 doesn't yet support lan statements, ignore it")
continue
if devicename.lower() == 'lan':
print translate("NETFile", "Warning: connections to device %s might not work properly and have to be removed manually by editing the topology file in a text editor") % devicename
if isinstance(device, lib.Router):
platform = device.model
# dynamips lib doesn't return c3700, force platform
if platform == 'c3725' or platform == 'c3745':
platform = 'c3700'
model = device.model_string
node = self.create_node(device, 'Router ' + platform, 'ROUTER ' + device.name)
assert(node)
self.configure_node(node, device)
self.populate_connection_list_for_router(device, connection_list)
if not config_dir and device.cnfg:
config_dir = os.path.dirname(device.cnfg)
#FIXME: don't hardcode baseconfig.txt
if device.cnfg and not os.path.exists(device.cnfg):
baseconfig = globals.GApp.systconf['general'].ios_path + os.sep + 'baseconfig.txt'
if os.path.exists(baseconfig):
globals.GApp.topology.applyIOSBaseConfig(node, baseconfig)
elif os.path.exists(GNS3_RUN_PATH + os.sep + 'baseconfig.txt'):
globals.GApp.topology.applyIOSBaseConfig(node, GNS3_RUN_PATH + os.sep + 'baseconfig.txt')
match_obj = router_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_router_id:
max_router_id = id
elif isinstance(device, lib.ETHSW):
node = self.create_node(device, 'Ethernet switch', 'ETHSW ' + device.name)
self.configure_node(node, device)
config = {}
config['vlans'] = {}
config['ports'] = {}
keys = device.mapping.keys()
keys.sort()
for port in keys:
(porttype, tmpvlan, nio, twosided) = device.mapping[port]
vlan = int(tmpvlan)
if not config['vlans'].has_key(vlan):
config['vlans'][vlan] = []
if twosided:
config['ports'][port] = porttype
config['vlans'][vlan].append(port)
else:
config['ports'][port] = porttype
config['vlans'][vlan].append(port)
cloud = self.create_cloud(nio.config_info(), device.name, str(port))
globals.GApp.topology.recordLink(node.id, str(port), cloud.id, nio.config_info(), node, cloud)
cloud.startNode()
node.set_config(config)
node.set_hypervisor(device.dynamips)
self.populate_connection_list_for_emulated_switch(device, connection_list)
match_obj = ethsw_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_ethsw_id:
max_ethsw_id = id
elif isinstance(device, lib.Hub):
node = self.create_node(device, 'Ethernet hub', 'Hub ' + device.name)
self.configure_node(node, device)
config = {}
keys = device.nios.keys()
keys.sort()
config['ports'] = range(1, len(keys) + 1)
for port in keys:
nio = device.nios[port]
if nio.config_info().lower()[:3] == 'nio':
cloud = self.create_cloud(nio.config_info(), device.name, str(port))
globals.GApp.topology.recordLink(node.id, str(port), cloud.id, nio.config_info(), node, cloud)
cloud.startNode()
node.set_config(config)
node.set_hypervisor(device.dynamips)
self.populate_connection_list_for_emulated_switch(device, connection_list)
match_obj = hub_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_hub_id:
max_hub_id = id
elif isinstance(device, lib.FRSW):
config = {}
config['ports'] = []
config['mapping'] = {}
keys = device.pvcs.keys()
keys.sort()
for (port1, dlci1) in keys:
(port2, dlci2) = device.pvcs[(port1, dlci1)]
if not port1 in config['ports']:
config['ports'].append(port1)
if not port2 in config['ports']:
config['ports'].append(port2)
config['mapping'][str(port1) + ':' + str(dlci1)] = str(port2) + ':' + str(dlci2)
node = self.create_node(device, 'Frame Relay switch', 'FRSW ' + device.name)
self.configure_node(node, device)
node.set_config(config)
node.set_hypervisor(device.dynamips)
self.populate_connection_list_for_emulated_switch(device, connection_list)
match_obj = frsw_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_frsw_id:
max_frsw_id = id
elif isinstance(device, lib.ATMSW):
config = {}
config['ports'] = []
config['mapping'] = {}
keys = device.vpivci_map.keys()
keys.sort()
for key in keys:
if len(key) == 2:
#port1, vpi1 -> port2, vpi2
(port1, vpi1) = key
(port2, vpi2) = device.vpivci_map[key]
config['mapping'][str(port1) + ':' + str(vpi1)] = str(port2) + ':' + str(vpi2)
if not port1 in config['ports']:
config['ports'].append(port1)
if not port2 in config['ports']:
config['ports'].append(port2)
for key in keys:
if len(key) == 3:
                        #port1, vpi1, vci1 -> port2, vpi2, vci2
(port1, vpi1, vci1) = key
(port2, vpi2, vci2) = device.vpivci_map[key]
config['mapping'][str(port1) + ':' + str(vpi1) + ':' + str(vci1)] = str(port2) + ':' + str(vpi2) + ':' + str(vci2)
if not port1 in config['ports']:
config['ports'].append(port1)
if not port2 in config['ports']:
config['ports'].append(port2)
node = self.create_node(device, 'ATM switch', 'ATMSW ' + device.name)
self.configure_node(node, device)
node.set_config(config)
node.set_hypervisor(device.dynamips)
self.populate_connection_list_for_emulated_switch(device, connection_list)
match_obj = atmsw_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_atmsw_id:
max_atmsw_id = id
elif isinstance(device, lib.ATMBR):
config = {}
config['ports'] = []
config['mapping'] = {}
keys = device.mapping.keys()
keys.sort()
for port1 in keys:
(port2, vpi, vci) = device.mapping[port1]
config['mapping'][str(port1)] = str(port2) + ':' + str(vpi) + ':' + str(vci)
if not port1 in config['ports']:
config['ports'].append(port1)
if not port2 in config['ports']:
config['ports'].append(port2)
node = self.create_node(device, 'ATM bridge', 'ATMBR ' + device.name)
self.configure_node(node, device)
node.set_config(config)
node.set_hypervisor(device.dynamips)
self.populate_connection_list_for_emulated_switch(device, connection_list)
match_obj = atmsw_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if id > max_atmbr_id:
max_atmbr_id = id
elif isinstance(device, qlib.AnyEmuDevice) or isinstance(device, vboxlib.AnyVBoxEmuDevice):
node = self.create_node(device, device._ufd_machine, device.gen_cfg_name())
assert(node)
node.set_hypervisor(device.dynamips)
self.configure_node(node, device)
node.create_config()
self.populate_connection_list_for_emulated_device(device, connection_list)
match_obj = emu_hostname_re.match(node.hostname)
if match_obj:
id = int(match_obj.group(1))
if isinstance(device, qlib.AnyEmuDevice) and (id > max_emu_id):
max_emu_id = id
if isinstance(device, vboxlib.AnyVBoxEmuDevice) and (id > max_vbox_emu_id):
max_vbox_emu_id = id
globals.GApp.topology.addItem(node)
# update next IDs for nodes
if max_router_id != -1:
init_router_id(max_router_id + 1)
if max_ethsw_id != -1:
init_ethsw_id(max_ethsw_id + 1)
if max_hub_id != -1:
init_hub_id(max_hub_id + 1)
if max_frsw_id != -1:
init_frsw_id(max_frsw_id + 1)
if max_atmsw_id != -1:
init_atmsw_id(max_atmsw_id + 1)
if max_atmbr_id != -1:
init_atmbr_id(max_atmbr_id + 1)
if max_emu_id != -1:
init_emu_id(max_emu_id + 1)
if max_vbox_emu_id != -1:
init_vbox_emu_id(max_vbox_emu_id + 1)
# update current hypervisor base port and base UDP
base_udp = 0
hypervisor_port = 0
working_dir = None
for dynamips in globals.GApp.dynagen.dynamips.values():
if isinstance(dynamips, lib.Dynamips):
if not working_dir:
working_dir = dynamips.workingdir
if dynamips.port > hypervisor_port:
hypervisor_port = dynamips.port
if dynamips.starting_udp > base_udp:
base_udp = dynamips.starting_udp
if base_udp:
globals.GApp.dynagen.globaludp = base_udp + globals.GApp.systconf['dynamips'].udp_incrementation
if hypervisor_port:
globals.hypervisor_baseport = hypervisor_port + 1
debug("set hypervisor base port: " + str(globals.hypervisor_baseport))
debug("set base UDP: " + str(globals.GApp.dynagen.globaludp))
# restore project working directory if not found in gns3 data
if not globals.GApp.workspace.projectWorkdir and working_dir and working_dir[-7:] == 'working':
globals.GApp.workspace.projectWorkdir = os.path.abspath(working_dir)
debug("Set working directory: " + os.path.abspath(working_dir))
# restore project configs directory if not found in gns3 data
if not globals.GApp.workspace.projectConfigs and config_dir and config_dir[-7:] == 'configs':
globals.GApp.workspace.projectConfigs = os.path.abspath(config_dir)
debug("Set configs directory: " + os.path.abspath(config_dir))
for connection in connection_list:
self.add_connection(connection)
self.apply_decorative_node_connections()
globals.GApp.mainWindow.treeWidget_TopologySummary.refresh()
globals.GApp.dynagen.update_running_config()
globals.GApp.workspace.projectFile = path
globals.GApp.workspace.setWindowTitle("GNS3 - " + globals.GApp.workspace.projectFile)
debug("Running config after importing: " + str(self.dynagen.running_config))
for node in globals.GApp.topology.nodes.itervalues():
node.updateToolTips()
def export_router_config(self, device, auto=False):
curtime = time.strftime("%H:%M:%S")
try:
file_path = os.path.normpath(globals.GApp.workspace.projectConfigs) + os.sep + device.name + '.cfg'
config = base64.decodestring(device.config_b64)
config = '!\n' + config.replace('\r', "")
# Write out the config to a file
if auto == False:
print translate("NETFile", "%s: Exporting %s configuration to %s") % (curtime, device.name, file_path)
except lib.DynamipsError, msg:
if auto == False:
print translate("NETFile", "%s: %s: Dynamips error: %s") % (curtime, device.name, msg)
return
except lib.DynamipsWarning, msg:
if auto == False:
print translate("NETFile", "%s: %s: Dynamips warning: %s") % (curtime, device.name, msg)
return
except lib.DynamipsErrorHandled:
print translate("NETFile", "%s: Dynamips process %s:%i has crashed") % (curtime, device.dynamips.host, device.dynamips.port)
file_path = os.path.normpath(globals.GApp.workspace.projectConfigs) + os.sep + device.name + '.recovered.cfg'
dynamips_files = glob.glob(os.path.normpath(device.dynamips.workingdir) + os.sep + device.model + '_' + device.name + '_nvram*')
dynamips_files += glob.glob(os.path.normpath(device.dynamips.workingdir) + os.sep + device.model + '_' + device.name + '_rom')
for nvram_file in dynamips_files:
if nvram_export(nvram_file, file_path):
print translate("NETFile", "%s: Exporting %s configuration to %s using recovery method") % (curtime, device.name, file_path)
self.dynagen.running_config[device.dynamips.host + ':' + str(device.dynamips.port)]['ROUTER ' + device.name]['cnfg'] = file_path
else:
print translate("NETFile", "%s: %s: Could not export configuration to %s") % (curtime, device.name, file_path)
if device.state != 'stopped':
device.stop()
return
try:
f = open(file_path, 'w') #export_router_config
f.write(config)
f.close()
device.cnfg = file_path
self.dynagen.running_config[device.dynamips.host + ':' + str(device.dynamips.port)]['ROUTER ' + device.name]['cnfg'] = file_path
except IOError, e:
QtGui.QMessageBox.critical(globals.GApp.mainWindow, translate("NETFile", "%s: IOError") % device.name, translate("NETFile", "%s: IO Error: %s") % (file_path, e))
return
def export_net_file(self, path, auto=False):
""" Export a .net file
"""
# remove unused hypervisors
hypervisors = self.dynagen.dynamips.copy()
for (name, hypervisor) in hypervisors.iteritems():
if isinstance(hypervisor, lib.Dynamips) and len(hypervisor.devices) == 0:
has_ethsw = False
for item in globals.GApp.topology.items():
if (isinstance(item, ETHSW) or isinstance(item, Hub)) and item.hypervisor and item.hypervisor == hypervisor:
has_ethsw = True
break
if not has_ethsw:
del self.dynagen.dynamips[name]
if (isinstance(hypervisor, vboxlib.VBox) or isinstance(hypervisor, qlib.Qemu)) and len(hypervisor.devices) == 0:
del self.dynagen.dynamips[name]
for hypervisor in self.dynagen.dynamips.values():
hypervisor.configchange = True
self.dynagen.defaults_config_ran = False
self.dynagen.update_running_config()
debugmsg(3, ("NETFile.py: export_net_file() dynagen.running_config = ", self.dynagen.running_config))
debug("Running config: " + str(self.dynagen.running_config))
for item in globals.GApp.topology.items():
# record router configs
if isinstance(item, IOSRouter) and globals.GApp.workspace.projectConfigs:
device = item.get_dynagen_device()
try:
self.export_router_config(device, auto)
except lib.DynamipsErrorHandled:
item.shutdownInterfaces()
item.state = device.state
item.updateToolTips()
globals.GApp.mainWindow.treeWidget_TopologySummary.changeNodeStatus(item.hostname, item.state)
continue
note_nb = 1
shape_nb = 1
pix_nb = 1
for item in globals.GApp.topology.items():
# record clouds
if isinstance(item, Cloud):
if globals.GApp.workspace.flg_showHostname:
                    # ugly but simple way to force recording of the hostname x & y positions
item.removeHostname()
item.showHostname()
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['Cloud ' + item.hostname] = {}
config = self.dynagen.running_config['GNS3-DATA']['Cloud ' + item.hostname]
if not item.default_symbol:
config['symbol'] = item.type
config['x'] = item.x()
config['y'] = item.y()
if item.hostname_xpos and item.hostname_ypos:
config['hx'] = item.hostname_xpos
config['hy'] = item.hostname_ypos
zvalue = item.zValue()
if zvalue != 0:
config['z'] = zvalue
# record connections
connections = ''
for interface in item.getConnectedInterfaceList():
neighbor = item.getConnectedNeighbor(interface)
connections = connections + neighbor[0].hostname + ':' + neighbor[1] + ':' + interface + ' '
if connections:
config['connections'] = connections.strip()
# record notes
elif isinstance(item, Annotation): #and item.autoGenerated == False:
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['NOTE ' + str(note_nb)] = {}
config = self.dynagen.running_config['GNS3-DATA']['NOTE ' + str(note_nb)]
config['text'] = '"' + unicode(item.toPlainText(), 'utf-8', errors='replace').replace("\n", "\\n") + '"'
config['x'] = item.x()
config['y'] = item.y()
if item.deviceName and item.deviceIf:
config['interface'] = item.deviceName + ' ' + item.deviceIf
if item.font() != QtGui.QFont("TypeWriter", 10, QtGui.QFont.Bold):
config['font'] = '"' + str(item.font().toString()) + '"'
if item.rotation != 0:
config['rotate'] = item.rotation
if item.defaultTextColor() != QtCore.Qt.black:
config['color'] = '"' + str(item.defaultTextColor().name()) + '"'
zvalue = item.zValue()
if zvalue != 2:
config['z'] = zvalue
note_nb += 1
# record shape items
elif isinstance(item, AbstractShapeItem):
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['SHAPE ' + str(shape_nb)] = {}
config = self.dynagen.running_config['GNS3-DATA']['SHAPE ' + str(shape_nb)]
if isinstance(item, QtGui.QGraphicsRectItem):
config['type'] = 'rectangle'
else:
config['type'] = 'ellipse'
config['x'] = item.x()
config['y'] = item.y()
rect = item.rect()
config['width'] = rect.width()
config['height'] = rect.height()
brush = item.brush()
if brush.style() != QtCore.Qt.NoBrush and brush.color() != QtCore.Qt.transparent:
config['fill_color'] = '"' + str(brush.color().name()) + '"'
if item.rotation != 0:
config['rotate'] = item.rotation
pen = item.pen()
if pen.color() != QtCore.Qt.black:
config['border_color'] = '"' + str(pen.color().name()) + '"'
if pen.width() != 2:
config['border_width'] = pen.width()
if pen.style() != QtCore.Qt.SolidLine:
config['border_style'] = pen.style()
zvalue = item.zValue()
if zvalue != 0:
config['z'] = zvalue
shape_nb += 1
# record inserted images
elif isinstance(item, Pixmap):
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['PIXMAP ' + str(pix_nb)] = {}
config = self.dynagen.running_config['GNS3-DATA']['PIXMAP ' + str(pix_nb)]
if globals.GApp.systconf['general'].relative_paths:
config['path'] = self.convert_to_relpath(item.pixmap_path, path)
else:
config['path'] = item.pixmap_path
config['x'] = item.x()
config['y'] = item.y()
zvalue = item.zValue()
if zvalue != 0:
config['z'] = zvalue
pix_nb += 1
elif isinstance(item, DecorativeNode):
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['NODE ' + item.hostname] = {}
config = self.dynagen.running_config['GNS3-DATA']['NODE ' + item.hostname]
config['symbol'] = item.type
config['x'] = item.x()
config['y'] = item.y()
if item.hostname_xpos and item.hostname_ypos:
config['hx'] = item.hostname_xpos
config['hy'] = item.hostname_ypos
# record connections
connections = ''
for interface in item.getConnectedInterfaceList():
neighbor = item.getConnectedNeighbor(interface)
connections = connections + neighbor[0].hostname + ':' + neighbor[1] + ':' + interface + ' '
if connections:
config['connections'] = connections.strip()
elif isinstance(item, AbstractNode):
if globals.GApp.workspace.flg_showHostname:
                    # ugly but simple way to force recording of the hostname x & y positions
item.removeHostname()
item.showHostname()
# record node x & y positions
if not item.d:
print translate("NETFile", "%s must be connected or have a hypervisor set in order to be registered") % item.hostname
continue
if not item.default_symbol:
self.dynagen.running_config[item.d][item.get_running_config_name()]['symbol'] = item.type
try:
self.dynagen.running_config[item.d][item.get_running_config_name()]['x'] = item.x()
self.dynagen.running_config[item.d][item.get_running_config_name()]['y'] = item.y()
zvalue = item.zValue()
if zvalue != 0:
self.dynagen.running_config[item.d][item.get_running_config_name()]['z'] = zvalue
# record hostname x & y positions
                    if item.hostname_xpos and item.hostname_ypos:
self.dynagen.running_config[item.d][item.get_running_config_name()]['hx'] = item.hostname_xpos
self.dynagen.running_config[item.d][item.get_running_config_name()]['hy'] = item.hostname_ypos
except:
pass
# record project settings
if globals.GApp.workspace.projectConfigs or globals.GApp.workspace.projectWorkdir:
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
config = self.dynagen.running_config['GNS3-DATA']
if globals.GApp.workspace.projectConfigs:
config['configs'] = self.convert_to_relpath(globals.GApp.workspace.projectConfigs, path)
if globals.GApp.workspace.projectWorkdir:
config['workdir'] = self.convert_to_relpath(globals.GApp.workspace.projectWorkdir, path)
# register matrix data
matrix = globals.GApp.scene.matrix()
m11 = matrix.m11()
m22 = matrix.m22()
if float(m11) != 1.0 or float(m22) != 1.0:
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['m11'] = m11
self.dynagen.running_config['GNS3-DATA']['m22'] = m22
# register scene size
scene_width = int(globals.GApp.topology.width())
scene_height = int(globals.GApp.topology.height())
if scene_width != 2000 or scene_height != 1000:
if not self.dynagen.running_config.has_key('GNS3-DATA'):
self.dynagen.running_config['GNS3-DATA'] = {}
self.dynagen.running_config['GNS3-DATA']['width'] = scene_width
self.dynagen.running_config['GNS3-DATA']['height'] = scene_height
# autostart
autostart = False
for (name, val) in self.dynagen.autostart.iteritems():
if val == True:
autostart = True
break
self.dynagen.running_config['autostart'] = autostart
# add GNS3 version
from __main__ import VERSION
self.dynagen.running_config['version'] = VERSION
if globals.GApp.systconf['general'].relative_paths:
# Change absolute paths to relative paths if same base as the config file
for hypervisor in self.dynagen.dynamips.values():
if isinstance(hypervisor, qlib.Qemu):
h = 'qemu ' + hypervisor.host + ":" + str(hypervisor.port)
elif isinstance(hypervisor, vboxlib.VBox):
h = 'vbox ' + hypervisor.host + ":" + str(hypervisor.port)
else:
h = hypervisor.host + ":" + str(hypervisor.port)
config = self.dynagen.running_config[h]
#if config.has_key('workingdir') and not isinstance(hypervisor, vboxlib.VBox): # Dirty hack.
if config.has_key('workingdir'):
config['workingdir'] = self.convert_to_relpath(config['workingdir'], path)
for model in dynagen_namespace.DEVICETUPLE:
if config.has_key(model):
                        # ASA and AWP have no image
if model == '5520' or model == 'Soft32':
config[model]['initrd'] = self.convert_to_relpath(config[model]['initrd'], path)
config[model]['kernel'] = self.convert_to_relpath(config[model]['kernel'], path)
# IDS-4215 has no default image
elif model == 'IDS-4215':
config[model]['image1'] = self.convert_to_relpath(config[model]['image1'], path)
config[model]['image2'] = self.convert_to_relpath(config[model]['image2'], path)
else:
config[model]['image'] = self.convert_to_relpath(config[model]['image'], path)
for subsection in config.sections:
device = config[subsection]
if device.has_key('cnfg') and device['cnfg']:
device['cnfg'] = self.convert_to_relpath(device['cnfg'], path)
self.dynagen.running_config.filename = path
try:
debugmsg(3, ("NETFile.py: writing... dynagen.running_config = ", self.dynagen.running_config))
self.dynagen.running_config.write()
except IOError, e:
QtGui.QMessageBox.critical(globals.GApp.mainWindow, translate("NETFile", "%s: IOError") % device.name, translate("NETFile", "%s: IO Error: %s") % (path, e))
self.dynagen.running_config.filename = None
def convert_to_relpath(self, path, config_path):
""" Returns a relative path when the config path and another path share a common base directory
"""
debugmsg(3, "NETFile.py: convert_to_relpath(%s, %s)" % (unicode(path), unicode(config_path)))
# Workaround, if remote hypervisor doesn't have workdir set:
if path == None:
return None
real_image_path = os.path.realpath(path)
config_dir = os.path.dirname(os.path.realpath(config_path))
commonprefix = os.path.commonprefix([real_image_path, config_dir])
if config_dir == commonprefix:
relpath = os.path.relpath(real_image_path, commonprefix)
debug("Convert path " + path + " to a relative path : " + relpath)
return relpath
return path
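    # Editor's sketch (not from the original source; the paths below are hypothetical
    # and only illustrate the behavior of convert_to_relpath() above): with
    # path='/home/user/project/images/router.svg' and
    # config_path='/home/user/project/topology.net', config_dir resolves to
    # '/home/user/project', which equals the common prefix, so the method returns
    # the relative path 'images/router.svg'. A path outside the project directory,
    # such as '/opt/images/router.svg', shares no common base with the config file
    # and is returned unchanged.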
| dlintott/gns3 | src/GNS3/NETFile.py | Python | gpl-2.0 | 69,490 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (c) 2012-2015 Christian Schwarz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# required external modules
import unittest
# required modules from pycast
from pycast.errors.meanabsolutescalederror import MeanAbsoluteScaledError
from pycast.common.timeseries import TimeSeries
class MeanAbsoluteScaledErrorTest(unittest.TestCase):
def initialization_error_test(self):
"""Test for the exceptions raised during initialization."""
MeanAbsoluteScaledError(minimalErrorCalculationPercentage=60.0, historyLength=20.0)
try:
MeanAbsoluteScaledError(60.0, 0.0)
except ValueError:
pass
else:
assert False # pragma: no cover
try:
MeanAbsoluteScaledError(60.0, -12.0)
except ValueError:
pass
else:
assert False # pragma: no cover
try:
MeanAbsoluteScaledError(60.0, 120.0)
except ValueError:
pass
else:
assert False # pragma: no cover
try:
MeanAbsoluteScaledError(60.0, 60.0)
except ValueError:
pass
else:
assert False # pragma: no cover
def calculate_historic_means_test(self):
"""Test the calculation of the historic means."""
dataOrg = [[1.0, 10], [2.0, 12], [3.0, 14], [4.0, 13], [5.0, 17], [6.0, 20], [7.0, 23], [8.0, 26], [9.0, 29], [10.0, 31], [11.0, 26], [12.0, 21], [13.0, 18], [14.0, 14], [15.0, 13], [16.0, 19], [17.0, 24], [18.0, 28], [19.0, 30], [20.0, 32]]
# 2 2 1 4 3 3 3 3 2 5 5 3 4 1 6 5 4 2 2
# Sum(History) 12 13 14 16 14 16 18 18 19 18 19 19 20 18 19
correctResult = [ 2.4, 2.6, 2.8, 3.2, 2.8, 3.2, 3.6, 3.6, 3.8, 3.6, 3.8, 3.8, 4.0, 3.6]
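        # Editor's note (worked example derived from the data above): each historic
        # mean is the average of the previous historyLength=5 absolute one-step
        # differences of the original series, e.g. the first expected value is
        # (2 + 2 + 1 + 4 + 3) / 5 = 2.4 and the second is (2 + 1 + 4 + 3 + 3) / 5 = 2.6.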
tsOrg = TimeSeries.from_twodim_list(dataOrg)
mase = MeanAbsoluteScaledError(historyLength=5)
result = mase._get_historic_means(tsOrg)
assert result == correctResult
def local_error_calculation_test(self):
"""Testing the mean absolute error calculation of the MASE."""
dataOrg = [[1.0, 10], [2.0, 12], [3.0, 14], [4.0, 13], [5.0, 17], [6.0, 20], [7.0, 23], [8.0, 26], [9.0, 29], [10.0, 31], [11.0, 26], [12.0, 21], [13.0, 18], [14.0, 14], [15.0, 13], [16.0, 19], [17.0, 24], [18.0, 28], [19.0, 30], [20.0, 32]]
dataFor = [[1.0, 11], [2.0, 13], [3.0, 14], [4.0, 11], [5.0, 13], [6.0, 18], [7.0, 20], [8.0, 26], [9.0, 21], [10.0, 34], [11.0, 23], [12.0, 23], [13.0, 15], [14.0, 12], [15.0, 14], [16.0, 17], [17.0, 25], [18.0, 22], [19.0, 14], [20.0, 30]]
historyLength = 5
em = MeanAbsoluteScaledError(historyLength=historyLength)
# A history length of 5 implies that the first 6 values have to be ignored for error calculation
historyLength += 1
dataOrg = dataOrg[historyLength:]
dataFor = dataFor[historyLength:]
for orgValue, forValue in zip(dataOrg, dataFor):
difference = orgValue[1] - forValue[1]
difference = abs(difference)
assert difference == em.local_error([orgValue[1]], [forValue[1]])
def initialization_test(self):
"""Test for MASE initialization."""
dataOrg = [[1.0, 10], [2.0, 12], [3.0, 14], [4.0, 13], [5.0, 17], [6.0, 20], [7.0, 23], [8.0, 26], [9.0, 29], [10.0, 31], [11.0, 26], [12.0, 21], [13.0, 18], [14.0, 14], [15.0, 13], [16.0, 19], [17.0, 24], [18.0, 28], [19.0, 30], [20.0, 32]]
dataFor = [[1.0, 11], [2.0, 13], [3.0, 14], [4.0, 11], [5.0, 13], [6.0, 18], [7.0, 20], [8.0, 26], [9.0, 21], [10.0, 34], [11.0, 23], [12.0, 23], [13.0, 15], [14.0, 12], [15.0, 14], [16.0, 17], [17.0, 25], [18.0, 22], [19.0, 14], [20.0, 30]]
tsOrg = TimeSeries.from_twodim_list(dataOrg)
tsFor = TimeSeries.from_twodim_list(dataFor)
em = MeanAbsoluteScaledError(historyLength=5)
em.initialize(tsOrg, tsFor)
        assert len(em._errorValues) == len(em._historicMeans), "For each error value a historic mean has to exist."
try:
em.initialize(tsOrg, tsFor)
except StandardError:
pass
else:
assert False # pragma: no cover
em = MeanAbsoluteScaledError(historyLength=20.0)
em.initialize(tsOrg, tsFor)
        assert len(em._errorValues) == len(em._historicMeans), "For each error value a historic mean has to exist."
assert em._historyLength == 4, "The history is %s entries long. 4 were expected." % em._historyLength
em = MeanAbsoluteScaledError(historyLength=40.0)
em.initialize(tsOrg, tsFor)
        assert len(em._errorValues) == len(em._historicMeans), "For each error value a historic mean has to exist."
assert em._historyLength == 8, "The history is %s entries long. 8 were expected." % em._historyLength
def error_calculation_test(self):
"""Testing for the correct MASE calculation.
History length is 5 in this test.
"""
dataOrg = [[1.0, 10], [2.0, 12], [3.0, 14], [4.0, 13], [5.0, 17], [6.0, 20], [7.0, 23], [8.0, 26], [9.0, 29], [10.0, 31], [11.0, 26], [12.0, 21], [13.0, 18], [14.0, 14], [15.0, 13], [16.0, 19], [17.0, 24], [18.0, 28], [19.0, 30], [20.0, 32]]
dataFor = [[1.0, 11], [2.0, 13], [3.0, 14], [4.0, 11], [5.0, 13], [6.0, 18], [7.0, 20], [8.0, 26], [9.0, 21], [10.0, 34], [11.0, 23], [12.0, 23], [13.0, 15], [14.0, 12], [15.0, 14], [16.0, 17], [17.0, 25], [18.0, 22], [19.0, 14], [20.0, 30]]
# 2 2 1 4 3 3 3 3 2 5 5 3 4 1 6 5 4 2 2
# Sum(History) 12 13 14 16 14 16 18 18 19 18 19 19 20 18 19
# Mean(History) # # # # # 2.4 2.6 2.8 3.2 2.8 3.2 3.6 3.6 3.8 3.6 3.8 3.8 4.0 3.6 3.8
# AD 3 0 8 3 3 2 3 2 1 2 1 6 16 2
# Sum(AD) 3 3 11 14 17 19 22 24 25 27 28 34 50 52
# MAD 3 1.5 3.666 3.5 3.4 3.166 3.142 3 2.777 2.7 2.545 2.833 3.571 3.714
# MASE (0% - 100%) 1.25 0.625 1.527 1.458 1.416 1.319 1.309 1.25 1.157 1.125 1.06 1.18 1.602 1.547
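        # Editor's note (worked example derived from the tables above): each expected
        # MASE value is the running mean absolute deviation of the forecast divided by
        # the historic mean at the start of the error window (2.4), e.g. the first
        # value is 3 / 2.4 = 1.25 and the third is (3 + 0 + 8) / 3 / 2.4 ~= 1.527.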
tsOrg = TimeSeries.from_twodim_list(dataOrg)
tsFor = TimeSeries.from_twodim_list(dataFor)
historyLength = 5
em = MeanAbsoluteScaledError(historyLength=historyLength)
em.initialize(tsOrg, tsFor)
# check for error calculation depending on a specific endpoint
correctResult = [1.25, 0.625, 1.527, 1.458, 1.416, 1.319, 1.309, 1.25, 1.157, 1.125, "1.060", "1.180", 1.602, 1.547]
percentage = 100.0 / len(correctResult) + 0.2
for errVal in xrange(14):
endPercentage = percentage * (errVal + 1)
# set maximum percentage
if endPercentage > 100.0:
endPercentage = 100.0
calcErr = str(em.get_error(endPercentage=endPercentage))[:5]
correctRes = str(correctResult[errVal])[:5]
assert calcErr == correctRes
for errVal in xrange(14):
endDate = dataOrg[errVal + 6][0]
calcErr = str(em.get_error(endDate=endDate))[:5]
correctRes = str(correctResult[errVal])[:5]
assert calcErr == correctRes, "%s != %s" % (calcErr, correctRes)
em.get_error(startDate=7.0)
try:
em.get_error(startDate=42.23)
except ValueError:
pass
else:
assert False # pragma: no cover
| T-002/pycast | pycast/tests/meanabsolutescalederrortest.py | Python | mit | 10,136 |
#!/usr/bin/python -u
"""
Autotest scheduler
"""
import common
import datetime, errno, optparse, os, pwd, Queue, re, shutil, signal
import smtplib, socket, stat, subprocess, sys, tempfile, time, traceback, urllib
import itertools, logging, weakref, gc
import MySQLdb
from autotest_lib.scheduler import scheduler_logging_config
from autotest_lib.frontend import setup_django_environment
import django.db
from autotest_lib.client.common_lib import global_config, logging_manager
from autotest_lib.client.common_lib import host_protections, utils
from autotest_lib.database import database_connection
from autotest_lib.frontend.afe import models, rpc_utils, readonly_connection
from autotest_lib.frontend.afe import model_attributes
from autotest_lib.scheduler import drone_manager, drones, email_manager
from autotest_lib.scheduler import monitor_db_cleanup
from autotest_lib.scheduler import status_server, scheduler_config
from autotest_lib.scheduler import gc_stats, metahost_scheduler
from autotest_lib.scheduler import scheduler_models
BABYSITTER_PID_FILE_PREFIX = 'monitor_db_babysitter'
PID_FILE_PREFIX = 'monitor_db'
RESULTS_DIR = '.'
AUTOSERV_NICE_LEVEL = 10
DB_CONFIG_SECTION = 'AUTOTEST_WEB'
AUTOTEST_PATH = os.path.join(os.path.dirname(__file__), '..')
if os.environ.has_key('AUTOTEST_DIR'):
AUTOTEST_PATH = os.environ['AUTOTEST_DIR']
AUTOTEST_SERVER_DIR = os.path.join(AUTOTEST_PATH, 'server')
AUTOTEST_TKO_DIR = os.path.join(AUTOTEST_PATH, 'tko')
if AUTOTEST_SERVER_DIR not in sys.path:
sys.path.insert(0, AUTOTEST_SERVER_DIR)
# error message to leave in results dir when an autoserv process disappears
# mysteriously
_LOST_PROCESS_ERROR = """\
Autoserv failed abnormally during execution for this job, probably due to a
system error on the Autotest server. Full results may not be available. Sorry.
"""
_db = None
_shutdown = False
_autoserv_path = os.path.join(drones.AUTOTEST_INSTALL_DIR, 'server', 'autoserv')
_testing_mode = False
_drone_manager = None
def _parser_path_default(install_dir):
return os.path.join(install_dir, 'tko', 'parse')
_parser_path_func = utils.import_site_function(
__file__, 'autotest_lib.scheduler.site_monitor_db',
'parser_path', _parser_path_default)
_parser_path = _parser_path_func(drones.AUTOTEST_INSTALL_DIR)
def _get_pidfile_timeout_secs():
"""@returns How long to wait for autoserv to write pidfile."""
pidfile_timeout_mins = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'pidfile_timeout_mins', type=int)
return pidfile_timeout_mins * 60
def _site_init_monitor_db_dummy():
return {}
get_site_metahost_schedulers = utils.import_site_function(
__file__, 'autotest_lib.scheduler.site_metahost_scheduler',
'get_metahost_schedulers', lambda : ())
def _verify_default_drone_set_exists():
if (models.DroneSet.drone_sets_enabled() and
not models.DroneSet.default_drone_set_name()):
raise SchedulerError('Drone sets are enabled, but no default is set')
def _sanity_check():
"""Make sure the configs are consistent before starting the scheduler"""
_verify_default_drone_set_exists()
def main():
try:
try:
main_without_exception_handling()
except SystemExit:
raise
except:
logging.exception('Exception escaping in monitor_db')
raise
finally:
utils.delete_pid_file_if_exists(PID_FILE_PREFIX)
def main_without_exception_handling():
setup_logging()
usage = 'usage: %prog [options] results_dir'
parser = optparse.OptionParser(usage)
parser.add_option('--recover-hosts', help='Try to recover dead hosts',
action='store_true')
parser.add_option('--test', help='Indicate that scheduler is under ' +
'test and should use dummy autoserv and no parsing',
action='store_true')
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_usage()
return
scheduler_enabled = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'enable_scheduler', type=bool)
if not scheduler_enabled:
msg = ("Scheduler not enabled, set enable_scheduler to true in the "
"global_config's SCHEDULER section to enabled it. Exiting.")
logging.error(msg)
sys.exit(1)
global RESULTS_DIR
RESULTS_DIR = args[0]
site_init = utils.import_site_function(__file__,
"autotest_lib.scheduler.site_monitor_db", "site_init_monitor_db",
_site_init_monitor_db_dummy)
site_init()
    # Change the cwd while running to avoid issues in case we were launched from
# somewhere odd (such as a random NFS home directory of the person running
# sudo to launch us as the appropriate user).
os.chdir(RESULTS_DIR)
# This is helpful for debugging why stuff a scheduler launches is
# misbehaving.
logging.info('os.environ: %s', os.environ)
if options.test:
global _autoserv_path
_autoserv_path = 'autoserv_dummy'
global _testing_mode
_testing_mode = True
server = status_server.StatusServer()
server.start()
try:
initialize()
dispatcher = Dispatcher()
dispatcher.initialize(recover_hosts=options.recover_hosts)
while not _shutdown:
dispatcher.tick()
time.sleep(scheduler_config.config.tick_pause_sec)
except:
email_manager.manager.log_stacktrace(
"Uncaught exception; terminating monitor_db")
email_manager.manager.send_queued_emails()
server.shutdown()
_drone_manager.shutdown()
_db.disconnect()
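# Example invocation (editor's sketch; the results path below is hypothetical):
#   monitor_db.py --recover-hosts /usr/local/autotest/results
# This runs the dispatcher against the given results directory and attempts to
# recover dead hosts on startup, per the options parsed above.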
def setup_logging():
log_dir = os.environ.get('AUTOTEST_SCHEDULER_LOG_DIR', None)
log_name = os.environ.get('AUTOTEST_SCHEDULER_LOG_NAME', None)
logging_manager.configure_logging(
scheduler_logging_config.SchedulerLoggingConfig(), log_dir=log_dir,
logfile_name=log_name)
def handle_sigint(signum, frame):
global _shutdown
_shutdown = True
logging.info("Shutdown request received.")
def initialize():
logging.info("%s> dispatcher starting", time.strftime("%X %x"))
logging.info("My PID is %d", os.getpid())
if utils.program_is_alive(PID_FILE_PREFIX):
logging.critical("monitor_db already running, aborting!")
sys.exit(1)
utils.write_pid(PID_FILE_PREFIX)
if _testing_mode:
global_config.global_config.override_config_value(
DB_CONFIG_SECTION, 'database', 'stresstest_autotest_web')
os.environ['PATH'] = AUTOTEST_SERVER_DIR + ':' + os.environ['PATH']
global _db
_db = database_connection.DatabaseConnection(DB_CONFIG_SECTION)
_db.connect(db_type='django')
# ensure Django connection is in autocommit
setup_django_environment.enable_autocommit()
# bypass the readonly connection
readonly_connection.ReadOnlyConnection.set_globally_disabled(True)
logging.info("Setting signal handler")
signal.signal(signal.SIGINT, handle_sigint)
initialize_globals()
scheduler_models.initialize()
drones = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'drones', default='localhost')
drone_list = [hostname.strip() for hostname in drones.split(',')]
results_host = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'results_host', default='localhost')
_drone_manager.initialize(RESULTS_DIR, drone_list, results_host)
logging.info("Connected! Running...")
def initialize_globals():
global _drone_manager
_drone_manager = drone_manager.instance()
def _autoserv_command_line(machines, extra_args, job=None, queue_entry=None,
verbose=True):
"""
@returns The autoserv command line as a list of executable + parameters.
@param machines - string - A machine or comma separated list of machines
for the (-m) flag.
@param extra_args - list - Additional arguments to pass to autoserv.
@param job - Job object - If supplied, -u owner and -l name parameters
will be added.
@param queue_entry - A HostQueueEntry object - If supplied and no Job
object was supplied, this will be used to lookup the Job object.
"""
autoserv_argv = [_autoserv_path, '-p',
'-r', drone_manager.WORKING_DIRECTORY]
if machines:
autoserv_argv += ['-m', machines]
if job or queue_entry:
if not job:
job = queue_entry.job
autoserv_argv += ['-u', job.owner, '-l', job.name]
if verbose:
autoserv_argv.append('--verbose')
return autoserv_argv + extra_args
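# Illustrative sketch (editor's note, not part of the scheduler): for
# machines='host1,host2', extra_args=['-P', 'pidfile'] and a hypothetical job
# owned by 'debug_user' and named 'kernel_test', the helper above returns roughly
#   [_autoserv_path, '-p', '-r', drone_manager.WORKING_DIRECTORY,
#    '-m', 'host1,host2', '-u', 'debug_user', '-l', 'kernel_test',
#    '--verbose', '-P', 'pidfile']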
class SchedulerError(Exception):
"""Raised by HostScheduler when an inconsistent state occurs."""
class BaseHostScheduler(metahost_scheduler.HostSchedulingUtility):
"""Handles the logic for choosing when to run jobs and on which hosts.
This class makes several queries to the database on each tick, building up
some auxiliary data structures and using them to determine which hosts are
eligible to run which jobs, taking into account all the various factors that
affect that.
In the past this was done with one or two very large, complex database
queries. It has proven much simpler and faster to build these auxiliary
data structures and perform the logic in Python.
"""
def __init__(self):
self._metahost_schedulers = metahost_scheduler.get_metahost_schedulers()
# load site-specific scheduler selected in global_config
site_schedulers_str = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'site_metahost_schedulers',
default='')
site_schedulers = set(site_schedulers_str.split(','))
for scheduler in get_site_metahost_schedulers():
if type(scheduler).__name__ in site_schedulers:
# always prepend, so site schedulers take precedence
self._metahost_schedulers = (
[scheduler] + self._metahost_schedulers)
logging.info('Metahost schedulers: %s',
', '.join(type(scheduler).__name__ for scheduler
in self._metahost_schedulers))
def _get_ready_hosts(self):
# avoid any host with a currently active queue entry against it
hosts = scheduler_models.Host.fetch(
joins='LEFT JOIN afe_host_queue_entries AS active_hqe '
'ON (afe_hosts.id = active_hqe.host_id AND '
'active_hqe.active)',
where="active_hqe.host_id IS NULL "
"AND NOT afe_hosts.locked "
"AND (afe_hosts.status IS NULL "
"OR afe_hosts.status = 'Ready')")
return dict((host.id, host) for host in hosts)
@staticmethod
def _get_sql_id_list(id_list):
return ','.join(str(item_id) for item_id in id_list)
@classmethod
def _get_many2many_dict(cls, query, id_list, flip=False):
if not id_list:
return {}
query %= cls._get_sql_id_list(id_list)
rows = _db.execute(query)
return cls._process_many2many_dict(rows, flip)
@staticmethod
def _process_many2many_dict(rows, flip=False):
result = {}
for row in rows:
left_id, right_id = int(row[0]), int(row[1])
if flip:
left_id, right_id = right_id, left_id
result.setdefault(left_id, set()).add(right_id)
return result
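    # Illustrative sketch (editor's note): rows such as [(1, 10), (1, 11), (2, 10)]
    # fold into {1: set([10, 11]), 2: set([10])}; with flip=True the pairs are
    # reversed first, giving {10: set([1, 2]), 11: set([1])}.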
@classmethod
def _get_job_acl_groups(cls, job_ids):
query = """
SELECT afe_jobs.id, afe_acl_groups_users.aclgroup_id
FROM afe_jobs
INNER JOIN afe_users ON afe_users.login = afe_jobs.owner
INNER JOIN afe_acl_groups_users ON
afe_acl_groups_users.user_id = afe_users.id
WHERE afe_jobs.id IN (%s)
"""
return cls._get_many2many_dict(query, job_ids)
@classmethod
def _get_job_ineligible_hosts(cls, job_ids):
query = """
SELECT job_id, host_id
FROM afe_ineligible_host_queues
WHERE job_id IN (%s)
"""
return cls._get_many2many_dict(query, job_ids)
@classmethod
def _get_job_dependencies(cls, job_ids):
query = """
SELECT job_id, label_id
FROM afe_jobs_dependency_labels
WHERE job_id IN (%s)
"""
return cls._get_many2many_dict(query, job_ids)
@classmethod
def _get_host_acls(cls, host_ids):
query = """
SELECT host_id, aclgroup_id
FROM afe_acl_groups_hosts
WHERE host_id IN (%s)
"""
return cls._get_many2many_dict(query, host_ids)
@classmethod
def _get_label_hosts(cls, host_ids):
if not host_ids:
return {}, {}
query = """
SELECT label_id, host_id
FROM afe_hosts_labels
WHERE host_id IN (%s)
""" % cls._get_sql_id_list(host_ids)
rows = _db.execute(query)
labels_to_hosts = cls._process_many2many_dict(rows)
hosts_to_labels = cls._process_many2many_dict(rows, flip=True)
return labels_to_hosts, hosts_to_labels
@classmethod
def _get_labels(cls):
return dict((label.id, label) for label
in scheduler_models.Label.fetch())
def recovery_on_startup(self):
for metahost_scheduler in self._metahost_schedulers:
metahost_scheduler.recovery_on_startup()
def refresh(self, pending_queue_entries):
self._hosts_available = self._get_ready_hosts()
relevant_jobs = [queue_entry.job_id
for queue_entry in pending_queue_entries]
self._job_acls = self._get_job_acl_groups(relevant_jobs)
self._ineligible_hosts = self._get_job_ineligible_hosts(relevant_jobs)
self._job_dependencies = self._get_job_dependencies(relevant_jobs)
host_ids = self._hosts_available.keys()
self._host_acls = self._get_host_acls(host_ids)
self._label_hosts, self._host_labels = self._get_label_hosts(host_ids)
self._labels = self._get_labels()
def tick(self):
for metahost_scheduler in self._metahost_schedulers:
metahost_scheduler.tick()
def hosts_in_label(self, label_id):
return set(self._label_hosts.get(label_id, ()))
def remove_host_from_label(self, host_id, label_id):
self._label_hosts[label_id].remove(host_id)
def pop_host(self, host_id):
return self._hosts_available.pop(host_id)
def ineligible_hosts_for_entry(self, queue_entry):
return set(self._ineligible_hosts.get(queue_entry.job_id, ()))
def _is_acl_accessible(self, host_id, queue_entry):
job_acls = self._job_acls.get(queue_entry.job_id, set())
host_acls = self._host_acls.get(host_id, set())
return len(host_acls.intersection(job_acls)) > 0
def _check_job_dependencies(self, job_dependencies, host_labels):
missing = job_dependencies - host_labels
return len(missing) == 0
def _check_only_if_needed_labels(self, job_dependencies, host_labels,
queue_entry):
if not queue_entry.meta_host:
# bypass only_if_needed labels when a specific host is selected
return True
for label_id in host_labels:
label = self._labels[label_id]
if not label.only_if_needed:
# we don't care about non-only_if_needed labels
continue
if queue_entry.meta_host == label_id:
# if the label was requested in a metahost it's OK
continue
if label_id not in job_dependencies:
return False
return True
def _check_atomic_group_labels(self, host_labels, queue_entry):
"""
Determine if the given HostQueueEntry's atomic group settings are okay
to schedule on a host with the given labels.
@param host_labels: A list of label ids that the host has.
@param queue_entry: The HostQueueEntry being considered for the host.
@returns True if atomic group settings are okay, False otherwise.
"""
return (self._get_host_atomic_group_id(host_labels, queue_entry) ==
queue_entry.atomic_group_id)
def _get_host_atomic_group_id(self, host_labels, queue_entry=None):
"""
Return the atomic group label id for a host with the given set of
labels if any, or None otherwise. Raises an exception if more than
one atomic group are found in the set of labels.
@param host_labels: A list of label ids that the host has.
@param queue_entry: The HostQueueEntry we're testing. Only used for
extra info in a potential logged error message.
@returns The id of the atomic group found on a label in host_labels
or None if no atomic group label is found.
"""
atomic_labels = [self._labels[label_id] for label_id in host_labels
if self._labels[label_id].atomic_group_id is not None]
atomic_ids = set(label.atomic_group_id for label in atomic_labels)
if not atomic_ids:
return None
if len(atomic_ids) > 1:
logging.error('More than one Atomic Group on HQE "%s" via: %r',
queue_entry, atomic_labels)
return atomic_ids.pop()
def _get_atomic_group_labels(self, atomic_group_id):
"""
Lookup the label ids that an atomic_group is associated with.
@param atomic_group_id - The id of the AtomicGroup to look up.
        @returns A generator yielding Label ids for this atomic group.
"""
return (id for id, label in self._labels.iteritems()
if label.atomic_group_id == atomic_group_id
and not label.invalid)
def _get_eligible_host_ids_in_group(self, group_hosts, queue_entry):
"""
@param group_hosts - A sequence of Host ids to test for usability
and eligibility against the Job associated with queue_entry.
@param queue_entry - The HostQueueEntry that these hosts are being
tested for eligibility against.
@returns A subset of group_hosts Host ids that are eligible for the
supplied queue_entry.
"""
return set(host_id for host_id in group_hosts
if self.is_host_usable(host_id)
and self.is_host_eligible_for_job(host_id, queue_entry))
def is_host_eligible_for_job(self, host_id, queue_entry):
if self._is_host_invalid(host_id):
# if an invalid host is scheduled for a job, it's a one-time host
# and it therefore bypasses eligibility checks. note this can only
# happen for non-metahosts, because invalid hosts have their label
# relationships cleared.
return True
job_dependencies = self._job_dependencies.get(queue_entry.job_id, set())
host_labels = self._host_labels.get(host_id, set())
return (self._is_acl_accessible(host_id, queue_entry) and
self._check_job_dependencies(job_dependencies, host_labels) and
self._check_only_if_needed_labels(
job_dependencies, host_labels, queue_entry) and
self._check_atomic_group_labels(host_labels, queue_entry))
def _is_host_invalid(self, host_id):
host_object = self._hosts_available.get(host_id, None)
return host_object and host_object.invalid
def _schedule_non_metahost(self, queue_entry):
if not self.is_host_eligible_for_job(queue_entry.host_id, queue_entry):
return None
return self._hosts_available.pop(queue_entry.host_id, None)
def is_host_usable(self, host_id):
if host_id not in self._hosts_available:
# host was already used during this scheduling cycle
return False
if self._hosts_available[host_id].invalid:
# Invalid hosts cannot be used for metahosts. They're included in
# the original query because they can be used by non-metahosts.
return False
return True
def schedule_entry(self, queue_entry):
if queue_entry.host_id is not None:
return self._schedule_non_metahost(queue_entry)
for scheduler in self._metahost_schedulers:
if scheduler.can_schedule_metahost(queue_entry):
scheduler.schedule_metahost(queue_entry, self)
return None
raise SchedulerError('No metahost scheduler to handle %s' % queue_entry)
def find_eligible_atomic_group(self, queue_entry):
"""
Given an atomic group host queue entry, locate an appropriate group
of hosts for the associated job to run on.
The caller is responsible for creating new HQEs for the additional
hosts returned in order to run the actual job on them.
@returns A list of Host instances in a ready state to satisfy this
atomic group scheduling. Hosts will all belong to the same
atomic group label as specified by the queue_entry.
An empty list will be returned if no suitable atomic
group could be found.
TODO(gps): what is responsible for kicking off any attempted repairs on
a group of hosts? not this function, but something needs to. We do
not communicate that reason for returning [] outside of here...
For now, we'll just be unschedulable if enough hosts within one group
enter Repair Failed state.
"""
assert queue_entry.atomic_group_id is not None
job = queue_entry.job
assert job.synch_count and job.synch_count > 0
atomic_group = queue_entry.atomic_group
if job.synch_count > atomic_group.max_number_of_machines:
# Such a Job and HostQueueEntry should never be possible to
# create using the frontend. Regardless, we can't process it.
# Abort it immediately and log an error on the scheduler.
queue_entry.set_status(models.HostQueueEntry.Status.ABORTED)
logging.error(
'Error: job %d synch_count=%d > requested atomic_group %d '
'max_number_of_machines=%d. Aborted host_queue_entry %d.',
job.id, job.synch_count, atomic_group.id,
atomic_group.max_number_of_machines, queue_entry.id)
return []
hosts_in_label = self.hosts_in_label(queue_entry.meta_host)
ineligible_host_ids = self.ineligible_hosts_for_entry(queue_entry)
# Look in each label associated with atomic_group until we find one with
# enough hosts to satisfy the job.
for atomic_label_id in self._get_atomic_group_labels(atomic_group.id):
group_hosts = set(self.hosts_in_label(atomic_label_id))
if queue_entry.meta_host is not None:
# If we have a metahost label, only allow its hosts.
group_hosts.intersection_update(hosts_in_label)
group_hosts -= ineligible_host_ids
eligible_host_ids_in_group = self._get_eligible_host_ids_in_group(
group_hosts, queue_entry)
# Job.synch_count is treated as "minimum synch count" when
# scheduling for an atomic group of hosts. The atomic group
# number of machines is the maximum to pick out of a single
# atomic group label for scheduling at one time.
min_hosts = job.synch_count
max_hosts = atomic_group.max_number_of_machines
if len(eligible_host_ids_in_group) < min_hosts:
# Not enough eligible hosts in this atomic group label.
continue
eligible_hosts_in_group = [self._hosts_available[id]
for id in eligible_host_ids_in_group]
# So that they show up in a sane order when viewing the job.
eligible_hosts_in_group.sort(cmp=scheduler_models.Host.cmp_for_sort)
# Limit ourselves to scheduling the atomic group size.
if len(eligible_hosts_in_group) > max_hosts:
eligible_hosts_in_group = eligible_hosts_in_group[:max_hosts]
# Remove the selected hosts from our cached internal state
# of available hosts in order to return the Host objects.
host_list = []
for host in eligible_hosts_in_group:
hosts_in_label.discard(host.id)
self._hosts_available.pop(host.id)
host_list.append(host)
return host_list
return []
site_host_scheduler = utils.import_site_class(__file__,
"autotest_lib.scheduler.site_host_scheduler",
"site_host_scheduler", BaseHostScheduler)
class HostScheduler(site_host_scheduler):
pass
class Dispatcher(object):
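    """
    Central scheduler object: recovers scheduler state on startup, then on
    each tick() runs cleanup, handles aborts, schedules special tasks and new
    jobs, and drives the running Agents.
    """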
def __init__(self):
self._agents = []
self._last_clean_time = time.time()
self._host_scheduler = HostScheduler()
user_cleanup_time = scheduler_config.config.clean_interval
self._periodic_cleanup = monitor_db_cleanup.UserCleanup(
_db, user_cleanup_time)
self._24hr_upkeep = monitor_db_cleanup.TwentyFourHourUpkeep(_db)
self._host_agents = {}
self._queue_entry_agents = {}
self._tick_count = 0
self._last_garbage_stats_time = time.time()
self._seconds_between_garbage_stats = 60 * (
global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION,
'gc_stats_interval_mins', type=int, default=6*60))
def initialize(self, recover_hosts=True):
self._periodic_cleanup.initialize()
self._24hr_upkeep.initialize()
# always recover processes
self._recover_processes()
if recover_hosts:
self._recover_hosts()
self._host_scheduler.recovery_on_startup()
def tick(self):
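        """
        Run one scheduling cycle: refresh drone state, run periodic cleanup,
        process aborts and recurring runs, schedule delayed/running/special
        tasks and new jobs, tick all agents, then flush drone actions and
        queued emails.
        """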
self._garbage_collection()
_drone_manager.refresh()
self._run_cleanup()
self._find_aborting()
self._process_recurring_runs()
self._schedule_delay_tasks()
self._schedule_running_host_queue_entries()
self._schedule_special_tasks()
self._schedule_new_jobs()
self._handle_agents()
self._host_scheduler.tick()
_drone_manager.execute_actions()
email_manager.manager.send_queued_emails()
django.db.reset_queries()
self._tick_count += 1
def _run_cleanup(self):
self._periodic_cleanup.run_cleanup_maybe()
self._24hr_upkeep.run_cleanup_maybe()
def _garbage_collection(self):
threshold_time = time.time() - self._seconds_between_garbage_stats
if threshold_time < self._last_garbage_stats_time:
# Don't generate these reports very often.
return
self._last_garbage_stats_time = time.time()
# Force a full level 0 collection (because we can, it doesn't hurt
# at this interval).
gc.collect()
logging.info('Logging garbage collector stats on tick %d.',
self._tick_count)
gc_stats._log_garbage_collector_stats()
def _register_agent_for_ids(self, agent_dict, object_ids, agent):
for object_id in object_ids:
agent_dict.setdefault(object_id, set()).add(agent)
def _unregister_agent_for_ids(self, agent_dict, object_ids, agent):
for object_id in object_ids:
assert object_id in agent_dict
agent_dict[object_id].remove(agent)
def add_agent_task(self, agent_task):
agent = Agent(agent_task)
self._agents.append(agent)
agent.dispatcher = self
self._register_agent_for_ids(self._host_agents, agent.host_ids, agent)
self._register_agent_for_ids(self._queue_entry_agents,
agent.queue_entry_ids, agent)
def get_agents_for_entry(self, queue_entry):
"""
Find agents corresponding to the specified queue_entry.
"""
return list(self._queue_entry_agents.get(queue_entry.id, set()))
def host_has_agent(self, host):
"""
Determine if there is currently an Agent present using this host.
"""
return bool(self._host_agents.get(host.id, None))
def remove_agent(self, agent):
self._agents.remove(agent)
self._unregister_agent_for_ids(self._host_agents, agent.host_ids,
agent)
self._unregister_agent_for_ids(self._queue_entry_agents,
agent.queue_entry_ids, agent)
def _host_has_scheduled_special_task(self, host):
return bool(models.SpecialTask.objects.filter(host__id=host.id,
is_active=False,
is_complete=False))
def _recover_processes(self):
agent_tasks = self._create_recovery_agent_tasks()
self._register_pidfiles(agent_tasks)
_drone_manager.refresh()
self._recover_tasks(agent_tasks)
self._recover_pending_entries()
self._check_for_unrecovered_verifying_entries()
self._reverify_remaining_hosts()
# reinitialize drones after killing orphaned processes, since they can
# leave around files when they die
_drone_manager.execute_actions()
_drone_manager.reinitialize_drones()
def _create_recovery_agent_tasks(self):
return (self._get_queue_entry_agent_tasks()
+ self._get_special_task_agent_tasks(is_active=True))
def _get_queue_entry_agent_tasks(self):
# host queue entry statuses handled directly by AgentTasks (Verifying is
# handled through SpecialTasks, so is not listed here)
statuses = (models.HostQueueEntry.Status.STARTING,
models.HostQueueEntry.Status.RUNNING,
models.HostQueueEntry.Status.GATHERING,
models.HostQueueEntry.Status.PARSING,
models.HostQueueEntry.Status.ARCHIVING)
status_list = ','.join("'%s'" % status for status in statuses)
queue_entries = scheduler_models.HostQueueEntry.fetch(
where='status IN (%s)' % status_list)
agent_tasks = []
used_queue_entries = set()
for entry in queue_entries:
if self.get_agents_for_entry(entry):
# already being handled
continue
if entry in used_queue_entries:
# already picked up by a synchronous job
continue
agent_task = self._get_agent_task_for_queue_entry(entry)
agent_tasks.append(agent_task)
used_queue_entries.update(agent_task.queue_entries)
return agent_tasks
def _get_special_task_agent_tasks(self, is_active=False):
special_tasks = models.SpecialTask.objects.filter(
is_active=is_active, is_complete=False)
return [self._get_agent_task_for_special_task(task)
for task in special_tasks]
def _get_agent_task_for_queue_entry(self, queue_entry):
"""
Construct an AgentTask instance for the given active HostQueueEntry,
if one can currently run it.
@param queue_entry: a HostQueueEntry
@returns an AgentTask to run the queue entry
"""
task_entries = queue_entry.job.get_group_entries(queue_entry)
self._check_for_duplicate_host_entries(task_entries)
if queue_entry.status in (models.HostQueueEntry.Status.STARTING,
models.HostQueueEntry.Status.RUNNING):
if queue_entry.is_hostless():
return HostlessQueueTask(queue_entry=queue_entry)
return QueueTask(queue_entries=task_entries)
if queue_entry.status == models.HostQueueEntry.Status.GATHERING:
return GatherLogsTask(queue_entries=task_entries)
if queue_entry.status == models.HostQueueEntry.Status.PARSING:
return FinalReparseTask(queue_entries=task_entries)
if queue_entry.status == models.HostQueueEntry.Status.ARCHIVING:
return ArchiveResultsTask(queue_entries=task_entries)
raise SchedulerError('_get_agent_task_for_queue_entry got entry with '
'invalid status %s: %s'
% (queue_entry.status, queue_entry))
def _check_for_duplicate_host_entries(self, task_entries):
non_host_statuses = (models.HostQueueEntry.Status.PARSING,
models.HostQueueEntry.Status.ARCHIVING)
for task_entry in task_entries:
using_host = (task_entry.host is not None
and task_entry.status not in non_host_statuses)
if using_host:
self._assert_host_has_no_agent(task_entry)
def _assert_host_has_no_agent(self, entry):
"""
@param entry: a HostQueueEntry or a SpecialTask
"""
if self.host_has_agent(entry.host):
agent = tuple(self._host_agents.get(entry.host.id))[0]
raise SchedulerError(
'While scheduling %s, host %s already has a host agent %s'
% (entry, entry.host, agent.task))
def _get_agent_task_for_special_task(self, special_task):
"""
        Construct an AgentTask instance to run the given SpecialTask.
@param special_task: a models.SpecialTask instance
@returns an AgentTask to run this SpecialTask
"""
self._assert_host_has_no_agent(special_task)
special_agent_task_classes = (CleanupTask, VerifyTask, RepairTask)
for agent_task_class in special_agent_task_classes:
if agent_task_class.TASK_TYPE == special_task.task:
return agent_task_class(task=special_task)
        raise SchedulerError('No AgentTask class for task: %s' % special_task)
def _register_pidfiles(self, agent_tasks):
for agent_task in agent_tasks:
agent_task.register_necessary_pidfiles()
def _recover_tasks(self, agent_tasks):
orphans = _drone_manager.get_orphaned_autoserv_processes()
for agent_task in agent_tasks:
agent_task.recover()
if agent_task.monitor and agent_task.monitor.has_process():
orphans.discard(agent_task.monitor.get_process())
self.add_agent_task(agent_task)
self._check_for_remaining_orphan_processes(orphans)
def _get_unassigned_entries(self, status):
for entry in scheduler_models.HostQueueEntry.fetch(where="status = '%s'"
% status):
if entry.status == status and not self.get_agents_for_entry(entry):
# The status can change during iteration, e.g., if job.run()
# sets a group of queue entries to Starting
yield entry
def _check_for_remaining_orphan_processes(self, orphans):
if not orphans:
return
subject = 'Unrecovered orphan autoserv processes remain'
message = '\n'.join(str(process) for process in orphans)
email_manager.manager.enqueue_notify_email(subject, message)
die_on_orphans = global_config.global_config.get_config_value(
scheduler_config.CONFIG_SECTION, 'die_on_orphans', type=bool)
if die_on_orphans:
raise RuntimeError(subject + '\n' + message)
def _recover_pending_entries(self):
for entry in self._get_unassigned_entries(
models.HostQueueEntry.Status.PENDING):
logging.info('Recovering Pending entry %s', entry)
entry.on_pending()
def _check_for_unrecovered_verifying_entries(self):
queue_entries = scheduler_models.HostQueueEntry.fetch(
where='status = "%s"' % models.HostQueueEntry.Status.VERIFYING)
unrecovered_hqes = []
for queue_entry in queue_entries:
special_tasks = models.SpecialTask.objects.filter(
task__in=(models.SpecialTask.Task.CLEANUP,
models.SpecialTask.Task.VERIFY),
queue_entry__id=queue_entry.id,
is_complete=False)
if special_tasks.count() == 0:
unrecovered_hqes.append(queue_entry)
if unrecovered_hqes:
message = '\n'.join(str(hqe) for hqe in unrecovered_hqes)
raise SchedulerError(
'%d unrecovered verifying host queue entries:\n%s' %
(len(unrecovered_hqes), message))
def _get_prioritized_special_tasks(self):
"""
Returns all queued SpecialTasks prioritized for repair first, then
cleanup, then verify.
"""
queued_tasks = models.SpecialTask.objects.filter(is_active=False,
is_complete=False,
host__locked=False)
# exclude hosts with active queue entries unless the SpecialTask is for
# that queue entry
queued_tasks = models.SpecialTask.objects.add_join(
queued_tasks, 'afe_host_queue_entries', 'host_id',
join_condition='afe_host_queue_entries.active',
join_from_key='host_id', force_left_join=True)
queued_tasks = queued_tasks.extra(
where=['(afe_host_queue_entries.id IS NULL OR '
'afe_host_queue_entries.id = '
'afe_special_tasks.queue_entry_id)'])
# reorder tasks by priority
task_priority_order = [models.SpecialTask.Task.REPAIR,
models.SpecialTask.Task.CLEANUP,
models.SpecialTask.Task.VERIFY]
def task_priority_key(task):
return task_priority_order.index(task.task)
return sorted(queued_tasks, key=task_priority_key)
def _schedule_special_tasks(self):
"""
Execute queued SpecialTasks that are ready to run on idle hosts.
"""
for task in self._get_prioritized_special_tasks():
if self.host_has_agent(task.host):
continue
self.add_agent_task(self._get_agent_task_for_special_task(task))
def _reverify_remaining_hosts(self):
# recover active hosts that have not yet been recovered, although this
# should never happen
message = ('Recovering active host %s - this probably indicates a '
'scheduler bug')
self._reverify_hosts_where(
"status IN ('Repairing', 'Verifying', 'Cleaning')",
print_message=message)
def _reverify_hosts_where(self, where,
print_message='Reverifying host %s'):
        full_where = 'locked = 0 AND invalid = 0 AND ' + where
for host in scheduler_models.Host.fetch(where=full_where):
if self.host_has_agent(host):
# host has already been recovered in some way
continue
if self._host_has_scheduled_special_task(host):
# host will have a special task scheduled on the next cycle
continue
if print_message:
logging.info(print_message, host.hostname)
models.SpecialTask.objects.create(
task=models.SpecialTask.Task.CLEANUP,
host=models.Host.objects.get(id=host.id))
def _recover_hosts(self):
# recover "Repair Failed" hosts
message = 'Reverifying dead host %s'
self._reverify_hosts_where("status = 'Repair Failed'",
print_message=message)
def _get_pending_queue_entries(self):
# prioritize by job priority, then non-metahost over metahost, then FIFO
return list(scheduler_models.HostQueueEntry.fetch(
joins='INNER JOIN afe_jobs ON (job_id=afe_jobs.id)',
where='NOT complete AND NOT active AND status="Queued"',
order_by='afe_jobs.priority DESC, meta_host, job_id'))
def _refresh_pending_queue_entries(self):
"""
Lookup the pending HostQueueEntries and call our HostScheduler
refresh() method given that list. Return the list.
@returns A list of pending HostQueueEntries sorted in priority order.
"""
queue_entries = self._get_pending_queue_entries()
if not queue_entries:
return []
self._host_scheduler.refresh(queue_entries)
return queue_entries
def _schedule_atomic_group(self, queue_entry):
"""
Schedule the given queue_entry on an atomic group of hosts.
Returns immediately if there are insufficient available hosts.
Creates new HostQueueEntries based off of queue_entry for the
scheduled hosts and starts them all running.
"""
# This is a virtual host queue entry representing an entire
# atomic group, find a group and schedule their hosts.
group_hosts = self._host_scheduler.find_eligible_atomic_group(
queue_entry)
if not group_hosts:
return
logging.info('Expanding atomic group entry %s with hosts %s',
queue_entry,
', '.join(host.hostname for host in group_hosts))
for assigned_host in group_hosts[1:]:
# Create a new HQE for every additional assigned_host.
new_hqe = scheduler_models.HostQueueEntry.clone(queue_entry)
new_hqe.save()
new_hqe.set_host(assigned_host)
self._run_queue_entry(new_hqe)
# The first assigned host uses the original HostQueueEntry
queue_entry.set_host(group_hosts[0])
self._run_queue_entry(queue_entry)
def _schedule_hostless_job(self, queue_entry):
self.add_agent_task(HostlessQueueTask(queue_entry))
queue_entry.set_status(models.HostQueueEntry.Status.STARTING)
def _schedule_new_jobs(self):
queue_entries = self._refresh_pending_queue_entries()
if not queue_entries:
return
for queue_entry in queue_entries:
is_unassigned_atomic_group = (
queue_entry.atomic_group_id is not None
and queue_entry.host_id is None)
if queue_entry.is_hostless():
self._schedule_hostless_job(queue_entry)
elif is_unassigned_atomic_group:
self._schedule_atomic_group(queue_entry)
else:
assigned_host = self._host_scheduler.schedule_entry(queue_entry)
if assigned_host and not self.host_has_agent(assigned_host):
assert assigned_host.id == queue_entry.host_id
self._run_queue_entry(queue_entry)
def _schedule_running_host_queue_entries(self):
for agent_task in self._get_queue_entry_agent_tasks():
self.add_agent_task(agent_task)
def _schedule_delay_tasks(self):
for entry in scheduler_models.HostQueueEntry.fetch(
where='status = "%s"' % models.HostQueueEntry.Status.WAITING):
task = entry.job.schedule_delayed_callback_task(entry)
if task:
self.add_agent_task(task)
def _run_queue_entry(self, queue_entry):
queue_entry.schedule_pre_job_tasks()
def _find_aborting(self):
jobs_to_stop = set()
for entry in scheduler_models.HostQueueEntry.fetch(
where='aborted and not complete'):
logging.info('Aborting %s', entry)
for agent in self.get_agents_for_entry(entry):
agent.abort()
entry.abort(self)
jobs_to_stop.add(entry.job)
for job in jobs_to_stop:
job.stop_if_necessary()
def _can_start_agent(self, agent, num_started_this_cycle,
have_reached_limit):
# always allow zero-process agents to run
if agent.task.num_processes == 0:
return True
# don't allow any nonzero-process agents to run after we've reached a
# limit (this avoids starvation of many-process agents)
if have_reached_limit:
return False
# total process throttling
max_runnable_processes = _drone_manager.max_runnable_processes(
agent.task.owner_username,
agent.task.get_drone_hostnames_allowed())
if agent.task.num_processes > max_runnable_processes:
return False
# if a single agent exceeds the per-cycle throttling, still allow it to
# run when it's the first agent in the cycle
if num_started_this_cycle == 0:
return True
# per-cycle throttling
if (num_started_this_cycle + agent.task.num_processes >
scheduler_config.config.max_processes_started_per_cycle):
return False
return True
def _handle_agents(self):
num_started_this_cycle = 0
have_reached_limit = False
# iterate over copy, so we can remove agents during iteration
for agent in list(self._agents):
if not agent.started:
if not self._can_start_agent(agent, num_started_this_cycle,
have_reached_limit):
have_reached_limit = True
continue
num_started_this_cycle += agent.task.num_processes
agent.tick()
if agent.is_done():
logging.info("agent finished")
self.remove_agent(agent)
logging.info('%d running processes',
_drone_manager.total_running_processes())
def _process_recurring_runs(self):
recurring_runs = models.RecurringRun.objects.filter(
start_date__lte=datetime.datetime.now())
for rrun in recurring_runs:
# Create job from template
job = rrun.job
info = rpc_utils.get_job_info(job)
options = job.get_object_dict()
host_objects = info['hosts']
one_time_hosts = info['one_time_hosts']
metahost_objects = info['meta_hosts']
dependencies = info['dependencies']
atomic_group = info['atomic_group']
for host in one_time_hosts or []:
this_host = models.Host.create_one_time_host(host.hostname)
host_objects.append(this_host)
try:
rpc_utils.create_new_job(owner=rrun.owner.login,
options=options,
host_objects=host_objects,
metahost_objects=metahost_objects,
atomic_group=atomic_group)
except Exception, ex:
logging.exception(ex)
#TODO send email
if rrun.loop_count == 1:
rrun.delete()
else:
if rrun.loop_count != 0: # if not infinite loop
# calculate new start_date
difference = datetime.timedelta(seconds=rrun.loop_period)
rrun.start_date = rrun.start_date + difference
rrun.loop_count -= 1
rrun.save()
class PidfileRunMonitor(object):
"""
Client must call either run() to start a new process or
attach_to_existing_process().
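
    Illustrative usage (a sketch, not taken from the original comments):

        monitor = PidfileRunMonitor()
        monitor.run(command, working_directory, num_processes=1)
        # ...on each later cycle...
        exit_code = monitor.exit_code()   # None while still running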
"""
class _PidfileException(Exception):
"""
Raised when there's some unexpected behavior with the pid file, but only
used internally (never allowed to escape this class).
"""
def __init__(self):
self.lost_process = False
self._start_time = None
self.pidfile_id = None
self._state = drone_manager.PidfileContents()
def _add_nice_command(self, command, nice_level):
if not nice_level:
return command
return ['nice', '-n', str(nice_level)] + command
def _set_start_time(self):
self._start_time = time.time()
def run(self, command, working_directory, num_processes, nice_level=None,
log_file=None, pidfile_name=None, paired_with_pidfile=None,
username=None, drone_hostnames_allowed=None):
assert command is not None
if nice_level is not None:
command = ['nice', '-n', str(nice_level)] + command
self._set_start_time()
self.pidfile_id = _drone_manager.execute_command(
command, working_directory, pidfile_name=pidfile_name,
num_processes=num_processes, log_file=log_file,
paired_with_pidfile=paired_with_pidfile, username=username,
drone_hostnames_allowed=drone_hostnames_allowed)
def attach_to_existing_process(self, execution_path,
pidfile_name=drone_manager.AUTOSERV_PID_FILE,
num_processes=None):
self._set_start_time()
self.pidfile_id = _drone_manager.get_pidfile_id_from(
execution_path, pidfile_name=pidfile_name)
if num_processes is not None:
_drone_manager.declare_process_count(self.pidfile_id, num_processes)
def kill(self):
if self.has_process():
_drone_manager.kill_process(self.get_process())
def has_process(self):
self._get_pidfile_info()
return self._state.process is not None
def get_process(self):
self._get_pidfile_info()
assert self._state.process is not None
return self._state.process
def _read_pidfile(self, use_second_read=False):
assert self.pidfile_id is not None, (
'You must call run() or attach_to_existing_process()')
contents = _drone_manager.get_pidfile_contents(
self.pidfile_id, use_second_read=use_second_read)
if contents.is_invalid():
self._state = drone_manager.PidfileContents()
raise self._PidfileException(contents)
self._state = contents
def _handle_pidfile_error(self, error, message=''):
message = error + '\nProcess: %s\nPidfile: %s\n%s' % (
self._state.process, self.pidfile_id, message)
email_manager.manager.enqueue_notify_email(error, message)
self.on_lost_process(self._state.process)
def _get_pidfile_info_helper(self):
if self.lost_process:
return
self._read_pidfile()
if self._state.process is None:
self._handle_no_process()
return
if self._state.exit_status is None:
# double check whether or not autoserv is running
if _drone_manager.is_process_running(self._state.process):
return
# pid but no running process - maybe process *just* exited
self._read_pidfile(use_second_read=True)
if self._state.exit_status is None:
# autoserv exited without writing an exit code
# to the pidfile
self._handle_pidfile_error(
'autoserv died without writing exit code')
def _get_pidfile_info(self):
"""\
After completion, self._state will contain:
pid=None, exit_status=None if autoserv has not yet run
pid!=None, exit_status=None if autoserv is running
pid!=None, exit_status!=None if autoserv has completed
"""
try:
self._get_pidfile_info_helper()
except self._PidfileException, exc:
self._handle_pidfile_error('Pidfile error', traceback.format_exc())
def _handle_no_process(self):
"""\
Called when no pidfile is found or no pid is in the pidfile.
"""
message = 'No pid found at %s' % self.pidfile_id
if time.time() - self._start_time > _get_pidfile_timeout_secs():
email_manager.manager.enqueue_notify_email(
'Process has failed to write pidfile', message)
self.on_lost_process()
def on_lost_process(self, process=None):
"""\
Called when autoserv has exited without writing an exit status,
or we've timed out waiting for autoserv to write a pid to the
pidfile. In either case, we just return failure and the caller
should signal some kind of warning.
process is unimportant here, as it shouldn't be used by anyone.
"""
self.lost_process = True
self._state.process = process
self._state.exit_status = 1
self._state.num_tests_failed = 0
def exit_code(self):
self._get_pidfile_info()
return self._state.exit_status
def num_tests_failed(self):
"""@returns The number of tests that failed or -1 if unknown."""
self._get_pidfile_info()
if self._state.num_tests_failed is None:
return -1
return self._state.num_tests_failed
def try_copy_results_on_drone(self, **kwargs):
if self.has_process():
# copy results logs into the normal place for job results
_drone_manager.copy_results_on_drone(self.get_process(), **kwargs)
def try_copy_to_results_repository(self, source, **kwargs):
if self.has_process():
_drone_manager.copy_to_results_repository(self.get_process(),
source, **kwargs)
class Agent(object):
"""
An agent for use by the Dispatcher class to perform a task.
The following methods are required on all task objects:
poll() - Called periodically to let the task check its status and
                update its internal state. If the task succeeded, it should
                set its success attribute to True.
is_done() - Returns True if the task is finished.
abort() - Called when an abort has been requested. The task must
set its aborted attribute to True if it actually aborted.
The following attributes are required on all task objects:
aborted - bool, True if this task was aborted.
success - bool, True if this task succeeded.
queue_entry_ids - A sequence of HostQueueEntry ids this task handles.
host_ids - A sequence of Host ids this task represents.
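
    Illustrative only -- a minimal object satisfying this interface could
    look like:

        class NoopTask(object):
            queue_entry_ids = ()
            host_ids = ()
            aborted = False
            success = True
            def poll(self): pass
            def is_done(self): return True
            def abort(self): self.aborted = True

    Note that the Dispatcher's process throttling also expects a
    num_processes attribute on tasks it starts.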
"""
def __init__(self, task):
"""
@param task: A task as described in the class docstring.
"""
self.task = task
        # This is filled in by Dispatcher.add_agent_task()
self.dispatcher = None
self.queue_entry_ids = task.queue_entry_ids
self.host_ids = task.host_ids
self.started = False
self.finished = False
def tick(self):
self.started = True
if not self.finished:
self.task.poll()
if self.task.is_done():
self.finished = True
def is_done(self):
return self.finished
def abort(self):
if self.task:
self.task.abort()
if self.task.aborted:
# tasks can choose to ignore aborts
self.finished = True
class AgentTask(object):
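    """
    Base class for the units of work run by Agents.

    Subclasses supply the command line, working directory and owner; this
    base class starts the process through a PidfileRunMonitor, polls it on
    each cycle via poll()/tick(), and calls epilog() once it exits.
    """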
class _NullMonitor(object):
pidfile_id = None
def has_process(self):
return True
def __init__(self, log_file_name=None):
"""
@param log_file_name: (optional) name of file to log command output to
"""
self.done = False
self.started = False
self.success = None
self.aborted = False
self.monitor = None
self.queue_entry_ids = []
self.host_ids = []
self._log_file_name = log_file_name
def _set_ids(self, host=None, queue_entries=None):
if queue_entries and queue_entries != [None]:
self.host_ids = [entry.host.id for entry in queue_entries]
self.queue_entry_ids = [entry.id for entry in queue_entries]
else:
assert host
self.host_ids = [host.id]
def poll(self):
if not self.started:
self.start()
if not self.done:
self.tick()
def tick(self):
assert self.monitor
exit_code = self.monitor.exit_code()
if exit_code is None:
return
success = (exit_code == 0)
self.finished(success)
def is_done(self):
return self.done
def finished(self, success):
if self.done:
assert self.started
return
self.started = True
self.done = True
self.success = success
self.epilog()
def prolog(self):
"""
To be overridden.
"""
assert not self.monitor
self.register_necessary_pidfiles()
def _log_file(self):
if not self._log_file_name:
return None
return os.path.join(self._working_directory(), self._log_file_name)
def cleanup(self):
log_file = self._log_file()
if self.monitor and log_file:
self.monitor.try_copy_to_results_repository(log_file)
def epilog(self):
"""
To be overridden.
"""
self.cleanup()
logging.info("%s finished with success=%s", type(self).__name__,
self.success)
def start(self):
if not self.started:
self.prolog()
self.run()
self.started = True
def abort(self):
if self.monitor:
self.monitor.kill()
self.done = True
self.aborted = True
self.cleanup()
def _get_consistent_execution_path(self, execution_entries):
first_execution_path = execution_entries[0].execution_path()
for execution_entry in execution_entries[1:]:
assert execution_entry.execution_path() == first_execution_path, (
'%s (%s) != %s (%s)' % (execution_entry.execution_path(),
execution_entry,
first_execution_path,
execution_entries[0]))
return first_execution_path
def _copy_results(self, execution_entries, use_monitor=None):
"""
@param execution_entries: list of objects with execution_path() method
"""
if use_monitor is not None and not use_monitor.has_process():
return
assert len(execution_entries) > 0
if use_monitor is None:
assert self.monitor
use_monitor = self.monitor
assert use_monitor.has_process()
execution_path = self._get_consistent_execution_path(execution_entries)
results_path = execution_path + '/'
use_monitor.try_copy_to_results_repository(results_path)
def _parse_results(self, queue_entries):
for queue_entry in queue_entries:
queue_entry.set_status(models.HostQueueEntry.Status.PARSING)
def _archive_results(self, queue_entries):
for queue_entry in queue_entries:
queue_entry.set_status(models.HostQueueEntry.Status.ARCHIVING)
def _command_line(self):
"""
Return the command line to run. Must be overridden.
"""
raise NotImplementedError
@property
def num_processes(self):
"""
Return the number of processes forked by this AgentTask's process. It
may only be approximate. To be overridden if necessary.
"""
return 1
def _paired_with_monitor(self):
"""
If this AgentTask's process must run on the same machine as some
previous process, this method should be overridden to return a
PidfileRunMonitor for that process.
"""
return self._NullMonitor()
@property
def owner_username(self):
"""
Return login of user responsible for this task. May be None. Must be
overridden.
"""
raise NotImplementedError
def _working_directory(self):
"""
Return the directory where this AgentTask's process executes. Must be
overridden.
"""
raise NotImplementedError
def _pidfile_name(self):
"""
Return the name of the pidfile this AgentTask's process uses. To be
overridden if necessary.
"""
return drone_manager.AUTOSERV_PID_FILE
def _check_paired_results_exist(self):
if not self._paired_with_monitor().has_process():
email_manager.manager.enqueue_notify_email(
'No paired results in task',
'No paired results in task %s at %s'
% (self, self._paired_with_monitor().pidfile_id))
self.finished(False)
return False
return True
def _create_monitor(self):
assert not self.monitor
self.monitor = PidfileRunMonitor()
def run(self):
if not self._check_paired_results_exist():
return
self._create_monitor()
self.monitor.run(
self._command_line(), self._working_directory(),
num_processes=self.num_processes,
nice_level=AUTOSERV_NICE_LEVEL, log_file=self._log_file(),
pidfile_name=self._pidfile_name(),
paired_with_pidfile=self._paired_with_monitor().pidfile_id,
username=self.owner_username,
drone_hostnames_allowed=self.get_drone_hostnames_allowed())
def get_drone_hostnames_allowed(self):
if not models.DroneSet.drone_sets_enabled():
return None
hqes = models.HostQueueEntry.objects.filter(id__in=self.queue_entry_ids)
if not hqes:
# Only special tasks could be missing host queue entries
assert isinstance(self, SpecialAgentTask)
return self._user_or_global_default_drone_set(
self.task, self.task.requested_by)
job_ids = hqes.values_list('job', flat=True).distinct()
assert job_ids.count() == 1, ("AgentTask's queue entries "
"span multiple jobs")
job = models.Job.objects.get(id=job_ids[0])
drone_set = job.drone_set
if not drone_set:
return self._user_or_global_default_drone_set(job, job.user())
return drone_set.get_drone_hostnames()
def _user_or_global_default_drone_set(self, obj_with_owner, user):
"""
Returns the user's default drone set, if present.
Otherwise, returns the global default drone set.
"""
default_hostnames = models.DroneSet.get_default().get_drone_hostnames()
if not user:
logging.warn('%s had no owner; using default drone set',
obj_with_owner)
return default_hostnames
if not user.drone_set:
logging.warn('User %s has no default drone set, using global '
'default', user.login)
return default_hostnames
return user.drone_set.get_drone_hostnames()
def register_necessary_pidfiles(self):
pidfile_id = _drone_manager.get_pidfile_id_from(
self._working_directory(), self._pidfile_name())
_drone_manager.register_pidfile(pidfile_id)
paired_pidfile_id = self._paired_with_monitor().pidfile_id
if paired_pidfile_id:
_drone_manager.register_pidfile(paired_pidfile_id)
def recover(self):
if not self._check_paired_results_exist():
return
self._create_monitor()
self.monitor.attach_to_existing_process(
self._working_directory(), pidfile_name=self._pidfile_name(),
num_processes=self.num_processes)
if not self.monitor.has_process():
# no process to recover; wait to be started normally
self.monitor = None
return
self.started = True
logging.info('Recovering process %s for %s at %s'
% (self.monitor.get_process(), type(self).__name__,
self._working_directory()))
def _check_queue_entry_statuses(self, queue_entries, allowed_hqe_statuses,
allowed_host_statuses=None):
class_name = self.__class__.__name__
for entry in queue_entries:
if entry.status not in allowed_hqe_statuses:
raise SchedulerError('%s attempting to start '
'entry with invalid status %s: %s'
% (class_name, entry.status, entry))
invalid_host_status = (
allowed_host_statuses is not None
and entry.host.status not in allowed_host_statuses)
if invalid_host_status:
raise SchedulerError('%s attempting to start on queue '
'entry with invalid host status %s: %s'
% (class_name, entry.host.status, entry))
class TaskWithJobKeyvals(object):
"""AgentTask mixin providing functionality to help with job keyval files."""
_KEYVAL_FILE = 'keyval'
def _format_keyval(self, key, value):
return '%s=%s' % (key, value)
def _keyval_path(self):
"""Subclasses must override this"""
raise NotImplementedError
def _write_keyval_after_job(self, field, value):
assert self.monitor
if not self.monitor.has_process():
return
_drone_manager.write_lines_to_file(
self._keyval_path(), [self._format_keyval(field, value)],
paired_with_process=self.monitor.get_process())
def _job_queued_keyval(self, job):
return 'job_queued', int(time.mktime(job.created_on.timetuple()))
def _write_job_finished(self):
self._write_keyval_after_job("job_finished", int(time.time()))
def _write_keyvals_before_job_helper(self, keyval_dict, keyval_path):
keyval_contents = '\n'.join(self._format_keyval(key, value)
for key, value in keyval_dict.iteritems())
# always end with a newline to allow additional keyvals to be written
keyval_contents += '\n'
_drone_manager.attach_file_to_execution(self._working_directory(),
keyval_contents,
file_path=keyval_path)
def _write_keyvals_before_job(self, keyval_dict):
self._write_keyvals_before_job_helper(keyval_dict, self._keyval_path())
def _write_host_keyvals(self, host):
keyval_path = os.path.join(self._working_directory(), 'host_keyvals',
host.hostname)
platform, all_labels = host.platform_and_labels()
        all_labels = [urllib.quote(label) for label in all_labels]
keyval_dict = dict(platform=platform, labels=','.join(all_labels))
self._write_keyvals_before_job_helper(keyval_dict, keyval_path)
class SpecialAgentTask(AgentTask, TaskWithJobKeyvals):
"""
Subclass for AgentTasks that correspond to a SpecialTask entry in the DB.
"""
TASK_TYPE = None
host = None
queue_entry = None
def __init__(self, task, extra_command_args):
super(SpecialAgentTask, self).__init__()
assert self.TASK_TYPE is not None, 'self.TASK_TYPE must be overridden'
self.host = scheduler_models.Host(id=task.host.id)
self.queue_entry = None
if task.queue_entry:
self.queue_entry = scheduler_models.HostQueueEntry(
id=task.queue_entry.id)
self.task = task
self._extra_command_args = extra_command_args
def _keyval_path(self):
return os.path.join(self._working_directory(), self._KEYVAL_FILE)
def _command_line(self):
return _autoserv_command_line(self.host.hostname,
self._extra_command_args,
queue_entry=self.queue_entry)
def _working_directory(self):
return self.task.execution_path()
@property
def owner_username(self):
if self.task.requested_by:
return self.task.requested_by.login
return None
def prolog(self):
super(SpecialAgentTask, self).prolog()
self.task.activate()
self._write_host_keyvals(self.host)
def _fail_queue_entry(self):
assert self.queue_entry
if self.queue_entry.meta_host:
return # don't fail metahost entries, they'll be reassigned
self.queue_entry.update_from_database()
if self.queue_entry.status != models.HostQueueEntry.Status.QUEUED:
return # entry has been aborted
self.queue_entry.set_execution_subdir()
queued_key, queued_time = self._job_queued_keyval(
self.queue_entry.job)
self._write_keyval_after_job(queued_key, queued_time)
self._write_job_finished()
# copy results logs into the normal place for job results
self.monitor.try_copy_results_on_drone(
source_path=self._working_directory() + '/',
destination_path=self.queue_entry.execution_path() + '/')
pidfile_id = _drone_manager.get_pidfile_id_from(
self.queue_entry.execution_path(),
pidfile_name=drone_manager.AUTOSERV_PID_FILE)
_drone_manager.register_pidfile(pidfile_id)
if self.queue_entry.job.parse_failed_repair:
self._parse_results([self.queue_entry])
else:
self._archive_results([self.queue_entry])
def cleanup(self):
super(SpecialAgentTask, self).cleanup()
# We will consider an aborted task to be "Failed"
self.task.finish(bool(self.success))
if self.monitor:
if self.monitor.has_process():
self._copy_results([self.task])
if self.monitor.pidfile_id is not None:
_drone_manager.unregister_pidfile(self.monitor.pidfile_id)
class RepairTask(SpecialAgentTask):
TASK_TYPE = models.SpecialTask.Task.REPAIR
def __init__(self, task):
"""\
        task: the models.SpecialTask to run. If it has an associated queue
        entry, that entry is marked failed if this repair fails.
"""
protection = host_protections.Protection.get_string(
task.host.protection)
# normalize the protection name
protection = host_protections.Protection.get_attr_name(protection)
super(RepairTask, self).__init__(
task, ['-R', '--host-protection', protection])
# *don't* include the queue entry in IDs -- if the queue entry is
# aborted, we want to leave the repair task running
self._set_ids(host=self.host)
def prolog(self):
super(RepairTask, self).prolog()
logging.info("repair_task starting")
self.host.set_status(models.Host.Status.REPAIRING)
def epilog(self):
super(RepairTask, self).epilog()
if self.success:
self.host.set_status(models.Host.Status.READY)
else:
self.host.set_status(models.Host.Status.REPAIR_FAILED)
if self.queue_entry:
self._fail_queue_entry()
class PreJobTask(SpecialAgentTask):
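    """
    Base class for special tasks that run before a job (verify, cleanup).

    On failure, the autoserv debug log is copied to the results repository
    and, unless the host is protected with Do-not-verify, the queue entry
    (if any) is requeued and a repair task is created for the host.
    """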
def _copy_to_results_repository(self):
if not self.queue_entry or self.queue_entry.meta_host:
return
self.queue_entry.set_execution_subdir()
log_name = os.path.basename(self.task.execution_path())
source = os.path.join(self.task.execution_path(), 'debug',
'autoserv.DEBUG')
destination = os.path.join(
self.queue_entry.execution_path(), log_name)
self.monitor.try_copy_to_results_repository(
source, destination_path=destination)
def epilog(self):
super(PreJobTask, self).epilog()
if self.success:
return
self._copy_to_results_repository()
if self.host.protection == host_protections.Protection.DO_NOT_VERIFY:
# effectively ignore failure for these hosts
self.success = True
return
if self.queue_entry:
self.queue_entry.requeue()
if models.SpecialTask.objects.filter(
task=models.SpecialTask.Task.REPAIR,
queue_entry__id=self.queue_entry.id):
self.host.set_status(models.Host.Status.REPAIR_FAILED)
self._fail_queue_entry()
return
queue_entry = models.HostQueueEntry.objects.get(
id=self.queue_entry.id)
else:
queue_entry = None
models.SpecialTask.objects.create(
host=models.Host.objects.get(id=self.host.id),
task=models.SpecialTask.Task.REPAIR,
queue_entry=queue_entry,
requested_by=self.task.requested_by)
class VerifyTask(PreJobTask):
TASK_TYPE = models.SpecialTask.Task.VERIFY
def __init__(self, task):
super(VerifyTask, self).__init__(task, ['-v'])
self._set_ids(host=self.host, queue_entries=[self.queue_entry])
def prolog(self):
super(VerifyTask, self).prolog()
logging.info("starting verify on %s", self.host.hostname)
if self.queue_entry:
self.queue_entry.set_status(models.HostQueueEntry.Status.VERIFYING)
self.host.set_status(models.Host.Status.VERIFYING)
# Delete any queued manual reverifies for this host. One verify will do
# and there's no need to keep records of other requests.
queued_verifies = models.SpecialTask.objects.filter(
host__id=self.host.id,
task=models.SpecialTask.Task.VERIFY,
is_active=False, is_complete=False, queue_entry=None)
queued_verifies = queued_verifies.exclude(id=self.task.id)
queued_verifies.delete()
def epilog(self):
super(VerifyTask, self).epilog()
if self.success:
if self.queue_entry:
self.queue_entry.on_pending()
else:
self.host.set_status(models.Host.Status.READY)
class CleanupTask(PreJobTask):
# note this can also run post-job, but when it does, it's running standalone
# against the host (not related to the job), so it's not considered a
# PostJobTask
TASK_TYPE = models.SpecialTask.Task.CLEANUP
def __init__(self, task, recover_run_monitor=None):
super(CleanupTask, self).__init__(task, ['--cleanup'])
self._set_ids(host=self.host, queue_entries=[self.queue_entry])
def prolog(self):
super(CleanupTask, self).prolog()
logging.info("starting cleanup task for host: %s", self.host.hostname)
self.host.set_status(models.Host.Status.CLEANING)
if self.queue_entry:
self.queue_entry.set_status(models.HostQueueEntry.Status.VERIFYING)
def _finish_epilog(self):
if not self.queue_entry or not self.success:
return
do_not_verify_protection = host_protections.Protection.DO_NOT_VERIFY
should_run_verify = (
self.queue_entry.job.run_verify
and self.host.protection != do_not_verify_protection)
if should_run_verify:
entry = models.HostQueueEntry.objects.get(id=self.queue_entry.id)
models.SpecialTask.objects.create(
host=models.Host.objects.get(id=self.host.id),
queue_entry=entry,
task=models.SpecialTask.Task.VERIFY)
else:
self.queue_entry.on_pending()
def epilog(self):
super(CleanupTask, self).epilog()
if self.success:
self.host.update_field('dirty', 0)
self.host.set_status(models.Host.Status.READY)
self._finish_epilog()
class AbstractQueueTask(AgentTask, TaskWithJobKeyvals):
"""
Common functionality for QueueTask and HostlessQueueTask
"""
def __init__(self, queue_entries):
super(AbstractQueueTask, self).__init__()
self.job = queue_entries[0].job
self.queue_entries = queue_entries
def _keyval_path(self):
return os.path.join(self._working_directory(), self._KEYVAL_FILE)
def _write_control_file(self, execution_path):
control_path = _drone_manager.attach_file_to_execution(
execution_path, self.job.control_file)
return control_path
def _command_line(self):
execution_path = self.queue_entries[0].execution_path()
control_path = self._write_control_file(execution_path)
hostnames = ','.join(entry.host.hostname
for entry in self.queue_entries
if not entry.is_hostless())
execution_tag = self.queue_entries[0].execution_tag()
params = _autoserv_command_line(
hostnames,
['-P', execution_tag, '-n',
_drone_manager.absolute_path(control_path)],
job=self.job, verbose=False)
if not self.job.is_server_job():
params.append('-c')
return params
@property
def num_processes(self):
return len(self.queue_entries)
@property
def owner_username(self):
return self.job.owner
def _working_directory(self):
return self._get_consistent_execution_path(self.queue_entries)
def prolog(self):
queued_key, queued_time = self._job_queued_keyval(self.job)
keyval_dict = self.job.keyval_dict()
keyval_dict[queued_key] = queued_time
group_name = self.queue_entries[0].get_group_name()
if group_name:
keyval_dict['host_group_name'] = group_name
self._write_keyvals_before_job(keyval_dict)
for queue_entry in self.queue_entries:
queue_entry.set_status(models.HostQueueEntry.Status.RUNNING)
queue_entry.set_started_on_now()
def _write_lost_process_error_file(self):
error_file_path = os.path.join(self._working_directory(), 'job_failure')
_drone_manager.write_lines_to_file(error_file_path,
[_LOST_PROCESS_ERROR])
def _finish_task(self):
if not self.monitor:
return
self._write_job_finished()
if self.monitor.lost_process:
self._write_lost_process_error_file()
def _write_status_comment(self, comment):
_drone_manager.write_lines_to_file(
os.path.join(self._working_directory(), 'status.log'),
['INFO\t----\t----\t' + comment],
paired_with_process=self.monitor.get_process())
def _log_abort(self):
if not self.monitor or not self.monitor.has_process():
return
# build up sets of all the aborted_by and aborted_on values
aborted_by, aborted_on = set(), set()
for queue_entry in self.queue_entries:
if queue_entry.aborted_by:
aborted_by.add(queue_entry.aborted_by)
t = int(time.mktime(queue_entry.aborted_on.timetuple()))
aborted_on.add(t)
# extract some actual, unique aborted by value and write it out
# TODO(showard): this conditional is now obsolete, we just need to leave
# it in temporarily for backwards compatibility over upgrades. delete
# soon.
assert len(aborted_by) <= 1
if len(aborted_by) == 1:
aborted_by_value = aborted_by.pop()
aborted_on_value = max(aborted_on)
else:
aborted_by_value = 'autotest_system'
aborted_on_value = int(time.time())
self._write_keyval_after_job("aborted_by", aborted_by_value)
self._write_keyval_after_job("aborted_on", aborted_on_value)
aborted_on_string = str(datetime.datetime.fromtimestamp(
aborted_on_value))
self._write_status_comment('Job aborted by %s on %s' %
(aborted_by_value, aborted_on_string))
def abort(self):
super(AbstractQueueTask, self).abort()
self._log_abort()
self._finish_task()
def epilog(self):
super(AbstractQueueTask, self).epilog()
self._finish_task()
class QueueTask(AbstractQueueTask):
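    """
    Runs the main autoserv job process for a group of host queue entries;
    when it finishes, the entries move on to the Gathering stage.
    """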
def __init__(self, queue_entries):
super(QueueTask, self).__init__(queue_entries)
self._set_ids(queue_entries=queue_entries)
def prolog(self):
self._check_queue_entry_statuses(
self.queue_entries,
allowed_hqe_statuses=(models.HostQueueEntry.Status.STARTING,
models.HostQueueEntry.Status.RUNNING),
allowed_host_statuses=(models.Host.Status.PENDING,
models.Host.Status.RUNNING))
super(QueueTask, self).prolog()
for queue_entry in self.queue_entries:
self._write_host_keyvals(queue_entry.host)
queue_entry.host.set_status(models.Host.Status.RUNNING)
queue_entry.host.update_field('dirty', 1)
if self.job.synch_count == 1 and len(self.queue_entries) == 1:
# TODO(gps): Remove this if nothing needs it anymore.
# A potential user is: tko/parser
self.job.write_to_machines_file(self.queue_entries[0])
def _finish_task(self):
super(QueueTask, self)._finish_task()
for queue_entry in self.queue_entries:
queue_entry.set_status(models.HostQueueEntry.Status.GATHERING)
queue_entry.host.set_status(models.Host.Status.RUNNING)
class HostlessQueueTask(AbstractQueueTask):
def __init__(self, queue_entry):
super(HostlessQueueTask, self).__init__([queue_entry])
self.queue_entry_ids = [queue_entry.id]
def prolog(self):
self.queue_entries[0].update_field('execution_subdir', 'hostless')
super(HostlessQueueTask, self).prolog()
def _finish_task(self):
super(HostlessQueueTask, self)._finish_task()
self.queue_entries[0].set_status(models.HostQueueEntry.Status.PARSING)
class PostJobTask(AgentTask):
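    """
    Base class for tasks that run after the main autoserv process (log
    gathering, parsing, archiving). These are paired with the job's autoserv
    pidfile and are not killed when the job is aborted.
    """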
def __init__(self, queue_entries, log_file_name):
super(PostJobTask, self).__init__(log_file_name=log_file_name)
self.queue_entries = queue_entries
self._autoserv_monitor = PidfileRunMonitor()
self._autoserv_monitor.attach_to_existing_process(
self._working_directory())
def _command_line(self):
if _testing_mode:
return 'true'
return self._generate_command(
_drone_manager.absolute_path(self._working_directory()))
def _generate_command(self, results_dir):
raise NotImplementedError('Subclasses must override this')
@property
def owner_username(self):
return self.queue_entries[0].job.owner
def _working_directory(self):
return self._get_consistent_execution_path(self.queue_entries)
def _paired_with_monitor(self):
return self._autoserv_monitor
def _job_was_aborted(self):
was_aborted = None
for queue_entry in self.queue_entries:
queue_entry.update_from_database()
if was_aborted is None: # first queue entry
was_aborted = bool(queue_entry.aborted)
elif was_aborted != bool(queue_entry.aborted): # subsequent entries
entries = ['%s (aborted: %s)' % (entry, entry.aborted)
for entry in self.queue_entries]
email_manager.manager.enqueue_notify_email(
'Inconsistent abort state',
'Queue entries have inconsistent abort state:\n' +
'\n'.join(entries))
# don't crash here, just assume true
return True
return was_aborted
def _final_status(self):
if self._job_was_aborted():
return models.HostQueueEntry.Status.ABORTED
# we'll use a PidfileRunMonitor to read the autoserv exit status
if self._autoserv_monitor.exit_code() == 0:
return models.HostQueueEntry.Status.COMPLETED
return models.HostQueueEntry.Status.FAILED
def _set_all_statuses(self, status):
for queue_entry in self.queue_entries:
queue_entry.set_status(status)
def abort(self):
# override AgentTask.abort() to avoid killing the process and ending
# the task. post-job tasks continue when the job is aborted.
pass
def _pidfile_label(self):
# '.autoserv_execute' -> 'autoserv'
return self._pidfile_name()[1:-len('_execute')]
class GatherLogsTask(PostJobTask):
"""
Task responsible for
* gathering uncollected logs (if Autoserv crashed hard or was killed)
* copying logs to the results repository
* spawning CleanupTasks for hosts, if necessary
* spawning a FinalReparseTask for the job
"""
def __init__(self, queue_entries, recover_run_monitor=None):
self._job = queue_entries[0].job
super(GatherLogsTask, self).__init__(
queue_entries, log_file_name='.collect_crashinfo.log')
self._set_ids(queue_entries=queue_entries)
def _generate_command(self, results_dir):
host_list = ','.join(queue_entry.host.hostname
for queue_entry in self.queue_entries)
        return [_autoserv_path, '-p',
'--pidfile-label=%s' % self._pidfile_label(),
'--use-existing-results', '--collect-crashinfo',
'-m', host_list, '-r', results_dir]
@property
def num_processes(self):
return len(self.queue_entries)
def _pidfile_name(self):
return drone_manager.CRASHINFO_PID_FILE
def prolog(self):
self._check_queue_entry_statuses(
self.queue_entries,
allowed_hqe_statuses=(models.HostQueueEntry.Status.GATHERING,),
allowed_host_statuses=(models.Host.Status.RUNNING,))
super(GatherLogsTask, self).prolog()
def epilog(self):
super(GatherLogsTask, self).epilog()
self._parse_results(self.queue_entries)
self._reboot_hosts()
def _reboot_hosts(self):
if self._autoserv_monitor.has_process():
final_success = (self._final_status() ==
models.HostQueueEntry.Status.COMPLETED)
num_tests_failed = self._autoserv_monitor.num_tests_failed()
else:
final_success = False
num_tests_failed = 0
reboot_after = self._job.reboot_after
do_reboot = (
# always reboot after aborted jobs
self._final_status() == models.HostQueueEntry.Status.ABORTED
or reboot_after == model_attributes.RebootAfter.ALWAYS
or (reboot_after == model_attributes.RebootAfter.IF_ALL_TESTS_PASSED
and final_success and num_tests_failed == 0))
for queue_entry in self.queue_entries:
if do_reboot:
# don't pass the queue entry to the CleanupTask. if the cleanup
# fails, the job doesn't care -- it's over.
models.SpecialTask.objects.create(
host=models.Host.objects.get(id=queue_entry.host.id),
task=models.SpecialTask.Task.CLEANUP,
requested_by=self._job.owner_model())
else:
queue_entry.host.set_status(models.Host.Status.READY)
def run(self):
autoserv_exit_code = self._autoserv_monitor.exit_code()
# only run if Autoserv exited due to some signal. if we have no exit
# code, assume something bad (and signal-like) happened.
if autoserv_exit_code is None or os.WIFSIGNALED(autoserv_exit_code):
super(GatherLogsTask, self).run()
else:
self.finished(True)
class SelfThrottledPostJobTask(PostJobTask):
"""
Special AgentTask subclass that maintains its own global process limit.
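
    Subclasses set the limit by overriding _max_processes(); FinalReparseTask
    and ArchiveResultsTask below bound themselves by max_parse_processes and
    max_transfer_processes respectively.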
"""
_num_running_processes = 0
@classmethod
def _increment_running_processes(cls):
cls._num_running_processes += 1
@classmethod
def _decrement_running_processes(cls):
cls._num_running_processes -= 1
@classmethod
def _max_processes(cls):
raise NotImplementedError
@classmethod
def _can_run_new_process(cls):
return cls._num_running_processes < cls._max_processes()
def _process_started(self):
return bool(self.monitor)
def tick(self):
# override tick to keep trying to start until the process count goes
# down and we can, at which point we revert to default behavior
if self._process_started():
super(SelfThrottledPostJobTask, self).tick()
else:
self._try_starting_process()
def run(self):
# override run() to not actually run unless we can
self._try_starting_process()
def _try_starting_process(self):
if not self._can_run_new_process():
return
# actually run the command
super(SelfThrottledPostJobTask, self).run()
if self._process_started():
self._increment_running_processes()
def finished(self, success):
super(SelfThrottledPostJobTask, self).finished(success)
if self._process_started():
self._decrement_running_processes()
class FinalReparseTask(SelfThrottledPostJobTask):
def __init__(self, queue_entries):
super(FinalReparseTask, self).__init__(queue_entries,
log_file_name='.parse.log')
# don't use _set_ids, since we don't want to set the host_ids
self.queue_entry_ids = [entry.id for entry in queue_entries]
def _generate_command(self, results_dir):
return [_parser_path, '--write-pidfile', '-l', '2', '-r', '-o',
results_dir]
@property
def num_processes(self):
return 0 # don't include parser processes in accounting
def _pidfile_name(self):
return drone_manager.PARSER_PID_FILE
@classmethod
def _max_processes(cls):
return scheduler_config.config.max_parse_processes
def prolog(self):
self._check_queue_entry_statuses(
self.queue_entries,
allowed_hqe_statuses=(models.HostQueueEntry.Status.PARSING,))
super(FinalReparseTask, self).prolog()
def epilog(self):
super(FinalReparseTask, self).epilog()
self._archive_results(self.queue_entries)
class ArchiveResultsTask(SelfThrottledPostJobTask):
_ARCHIVING_FAILED_FILE = '.archiver_failed'
def __init__(self, queue_entries):
super(ArchiveResultsTask, self).__init__(queue_entries,
log_file_name='.archiving.log')
# don't use _set_ids, since we don't want to set the host_ids
self.queue_entry_ids = [entry.id for entry in queue_entries]
def _pidfile_name(self):
return drone_manager.ARCHIVER_PID_FILE
def _generate_command(self, results_dir):
        return [_autoserv_path, '-p',
'--pidfile-label=%s' % self._pidfile_label(), '-r', results_dir,
'--use-existing-results', '--control-filename=control.archive',
os.path.join(drones.AUTOTEST_INSTALL_DIR, 'scheduler',
'archive_results.control.srv')]
@classmethod
def _max_processes(cls):
return scheduler_config.config.max_transfer_processes
def prolog(self):
self._check_queue_entry_statuses(
self.queue_entries,
allowed_hqe_statuses=(models.HostQueueEntry.Status.ARCHIVING,))
super(ArchiveResultsTask, self).prolog()
def epilog(self):
super(ArchiveResultsTask, self).epilog()
if not self.success and self._paired_with_monitor().has_process():
failed_file = os.path.join(self._working_directory(),
self._ARCHIVING_FAILED_FILE)
paired_process = self._paired_with_monitor().get_process()
_drone_manager.write_lines_to_file(
failed_file, ['Archiving failed with exit code %s'
% self.monitor.exit_code()],
paired_with_process=paired_process)
self._set_all_statuses(self._final_status())
if __name__ == '__main__':
main()
| ceph/autotest | scheduler/monitor_db.py | Python | gpl-2.0 | 93,396 |
#------------------------------------------------------------------------------
# Copyright 2013 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#------------------------------------------------------------------------------
# for line features, finds the bearing angle from the first point to the last point
import os, sys, traceback, math
import arcpy
def Geo2Arithmetic(inAngle):
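    """
    Convert between a geographic azimuth (0 = north, clockwise) and an
    arithmetic angle (0 = east, counterclockwise). The mapping is its own
    inverse, so the same function works in either direction.

    Examples: 0.0 -> 90.0, 180.0 -> 270.0, 270.0 -> 180.0
    """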
    inAngle = math.fmod(inAngle, 360.0)
    if (inAngle < 0.0):
        # fmod keeps the sign of the input; shift negatives into [0, 360)
        inAngle = inAngle + 360.0
    # 0 to 90
    if (inAngle >= 0.0 and inAngle <= 90.0):
        outAngle = math.fabs(inAngle - 90.0)
    # 90 to 360
    else:
        outAngle = 360.0 - (inAngle - 90.0)
    return float(outAngle)
inputFeatures = arcpy.GetParameterAsText(0) # C:\Workspace\ArcGIS Defense 10.1\path slope\default.gdb\roads
inputAngleField = arcpy.GetParameterAsText(1) # aoo
deleteme = []
debug = False
try:
arcpy.AddMessage("Updating " + inputAngleField + " field for " + str(arcpy.GetCount_management(inputFeatures).getOutput(0)) + " rows ...")
with arcpy.da.UpdateCursor(inputFeatures,["OID@","SHAPE@",inputAngleField]) as rows:
for row in rows:
angle = None
geometry = row[1] # firstPoint, lastPoint
firstPoint = geometry.firstPoint
lastPoint = geometry.lastPoint
xdiff = (lastPoint.X - firstPoint.X)
ydiff = (lastPoint.Y - firstPoint.Y)
#distance = math.sqrt(math.pow(xdiff,2.0) + math.pow(ydiff,2.0))
# Convert from quadrants to arithmetic
if (xdiff == 0.0 and ydiff > 0.0):
# vertical line, slope infinity
angle = 90.0
if (xdiff == 0.0 and ydiff < 0.0):
# vertical line, slope infinity
angle = 270.0
if (xdiff > 0.0 and ydiff == 0.0):
angle = 0.0
if (xdiff < 0.0 and ydiff == 0.0):
angle = 180.0
if (xdiff > 0.0 and ydiff > 0.0): # Quadrant I (+,+)
angle = math.degrees(math.atan(ydiff/xdiff))
if (xdiff < 0.0 and ydiff > 0.0): # Quadrant II (-,+)
angle = 180.0 - math.fabs(math.degrees(math.atan(ydiff/xdiff)))
if (xdiff < 0.0 and ydiff < 0.0): # Quadrant III (-,-)
angle = 180.0 + math.fabs(math.degrees(math.atan(ydiff/xdiff)))
if (xdiff > 0.0 and ydiff < 0.0): # Quadrant IV (+,-)
angle = 360.0 - math.fabs(math.degrees(math.atan(ydiff/xdiff)))
#if debug == True: arcpy.AddMessage(str(xdiff) + " -- " + str(angle) + " -- " + str(ydiff))
if not angle == None:
row[2] = Geo2Arithmetic(angle)
else:
arcpy.AddWarning("Empty angle for feature " + str(row[0]) + ". This could be a closed loop feature.")
row[2] = None
#if debug == True: arcpy.AddMessage(" " + str(row))
rows.updateRow(row)
arcpy.SetParameter(2,inputFeatures)
except arcpy.ExecuteError:
# Get the tool error messages
msgs = arcpy.GetMessages()
arcpy.AddError(msgs)
#print msgs #UPDATE
print (msgs)
except:
# Get the traceback object
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
# Concatenate information together concerning the error into a message string
pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
# Return python error messages for use in script tool or Python Window
arcpy.AddError(pymsg)
arcpy.AddError(msgs)
# Print Python error messages for use in Python / Python Window
#print pymsg + "\n" UPDATE
print ((pymsg + "\n"))
#print msgs #UPDATE
print (msgs)
finally:
# cleanup intermediate datasets
if debug == True: arcpy.AddMessage("Removing intermediate datasets...")
for i in deleteme:
if debug == True: arcpy.AddMessage("Removing: " + str(i))
arcpy.Delete_management(i)
if debug == True: arcpy.AddMessage("Done")
| JudTown17/solutions-geoprocessing-toolbox | data_management/toolboxes/scripts/LineFeatureAngle.py | Python | apache-2.0 | 4,563 |
#!/usr/bin/env python3
#
# PLASMA : Generate an indented asm code (pseudo-C) with colored syntax.
# Copyright (C) 2015 Joel
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
from argparse import ArgumentParser
import plasma
import plasma.lib.utils
import plasma.lib.colors
from plasma.lib.database import Database
from plasma.lib.disassembler import Disassembler, NB_LINES_TO_DISASM
from plasma.lib.utils import die, error, debug__
from plasma.lib.generate_ast import generate_ast
from plasma.lib.exceptions import ExcArch, ExcFileFormat, ExcIfelse, ExcPEFail
#
# The global context variable is always named as gctx
#
class GlobalContext():
def __init__(self):
# TODO : let globally ?
plasma.lib.utils.gctx = self
plasma.lib.colors.gctx = self
self.comments = True # always True, will be removed
# For info() messages
self.quiet = False
self.is_interactive = False
# Command line options
self.print_andif = True
self.color = True
self.max_data_size = 30
self.filename = None
self.syms = False
self.calls_in_section = None
self.entry = None # string : symbol | EP | 0xNNNN
self.do_dump = False
self.vim = False
self.nb_lines = 30
        self.graph = False # print the graph (this flag is distinct from self.gph, the graph object)
self.interactive_mode = False
self.debug = False
self.raw_base = 0
self.raw_big_endian = False
self.list_sections = False
self.print_bytes = False
self.raw_type = None
self.print_data = False
self.capstone_string = 0 # See lib.ui.visual.main_cmd_inst_output
self.show_mangling = True
self.autoanalyzer = True
self.debugsp = False
# Built objects
self.dis = None # Disassembler
self.libarch = None # module lib.arch.<BIN_ARCH>
self.db = None # Database
self.api = None # Api
def parse_args(self):
parser = ArgumentParser(description=
'Reverse engineering for x86/ARM/MIPS binaries. Generation of pseudo-C. '
'Supported formats : ELF, PE. More commands available in the interactive'
' mode. https://github.com/joelpx/plasma')
parser.add_argument('filename', nargs='?', metavar='FILENAME')
parser.add_argument('-nc', '--nocolor', action='store_true')
parser.add_argument('-g', '--graph', action='store_true',
help='Generate a file graph.dot.')
parser.add_argument('--noandif', action='store_true',
help="Print normal 'if' instead of 'andif'")
parser.add_argument('--datasize', type=int, default=30, metavar='N',
help='default 30, maximum of chars to display for strings or bytes array.')
parser.add_argument('-x', '--entry', metavar='SYMBOLNAME|0xXXXXX|EP',
help='Pseudo-decompilation, default is main. EP stands for entry point.')
parser.add_argument('--vim', action='store_true',
help='Generate syntax colors for vim')
parser.add_argument('-s', '--symbols', action='store_true',
help='Print all symbols')
parser.add_argument('--sections', action='store_true',
help='Print all sections')
parser.add_argument('--dump', action='store_true',
help='Dump asm without decompilation')
parser.add_argument('-l', '--lines', type=int, default=30, metavar='N',
help='Max lines used with --dump')
parser.add_argument('--nbytes', type=int, default=0, metavar='N',
help='Print n bytes.')
parser.add_argument('-i', '--interactive', action='store_true',
help='Interactive mode')
parser.add_argument('-d', '--opt_debug', action='store_true')
parser.add_argument('--raw', metavar='x86|x64|arm|mips|mips64',
help='Consider the input file as a raw binary')
parser.add_argument('--rawbase', metavar='0xXXXXX',
help='Set base address of a raw file (default=0)')
parser.add_argument('--rawbe', action='store_true',
help='If not set it\'s in little endian')
parser.add_argument('-na', '--noautoanalyzer', action='store_true',
            help='Disable analysis on the entry point / symbols and don\'t scan memory. You can force it with the command push_analyze_symbols.')
parser.add_argument('--debugsp', action='store_true',
            help="Print the stack offset on each instruction. Warning: these values will not be saved in the database.")
args = parser.parse_args()
self.debug = args.opt_debug
self.print_andif = not args.noandif
self.color = not args.nocolor
self.max_data_size = args.datasize
self.filename = args.filename
self.raw_type = args.raw
self.raw_base = args.rawbase
self.syms = args.symbols
self.entry = args.entry
self.do_dump = args.dump
self.vim = args.vim
self.interactive_mode = args.interactive
self.nb_lines = args.lines
self.graph = args.graph
self.raw_big_endian = args.rawbe
self.list_sections = args.sections
self.autoanalyzer = not args.noautoanalyzer
self.debugsp = args.debugsp
if args.nbytes == 0:
self.nbytes = 4
self.print_bytes = False
else:
self.nbytes = int(args.nbytes)
self.print_bytes = True
if self.raw_base is not None:
try:
self.raw_base = int(self.raw_base, 16)
except:
error("--rawbase must be in hex format")
die()
else:
self.raw_base = 0
def load_file(self, filename=None):
if filename is None:
filename = self.filename
if not os.path.exists(filename):
error("file {self.filename} doesn't exist".format(self=self))
if self.interactive_mode:
return False
die()
if not os.path.isfile(filename):
            error("this is not a file")
if self.interactive_mode:
return False
die()
self.db = Database()
self.db.load(filename)
if self.raw_base != 0:
self.db.raw_base = self.raw_base
if self.raw_type is not None:
self.db.raw_type = self.raw_type
if self.raw_big_endian is not None:
self.db.raw_is_big_endian = self.raw_big_endian
if self.db.loaded:
self.raw_base = self.db.raw_base
self.raw_type = self.db.raw_type
self.raw_big_endian = self.db.raw_is_big_endian
try:
dis = Disassembler(filename, self.raw_type,
self.raw_base, self.raw_big_endian,
self.db)
except ExcArch as e:
error("arch %s is not supported" % e.arch)
if self.interactive_mode:
return False
die()
except ExcFileFormat:
error("the file is not PE or ELF binary")
if self.interactive_mode:
return False
die()
except ExcPEFail as e:
error(str(e.e))
            error("it seems that there is a random bug in pefile, you should retry.")
error("please report here https://github.com/joelpx/plasma/issues/16")
if self.interactive_mode:
return False
die()
self.dis = dis
self.libarch = dis.load_arch_module()
return True
def get_addr_context(self, ad, quiet=False):
adctx = AddrContext(self)
if isinstance(ad, int):
adctx.entry = self.db.mem.get_head_addr(ad)
return adctx
ret = adctx.init_address(ad, quiet=quiet) # here ad is a string
if not ret:
return None
adctx.entry = self.db.mem.get_head_addr(adctx.entry)
return adctx
#
# This is a context for a disassembling at a specific address, it contains
# the graph, the output... It's always named as "ctx"
#
class AddrContext():
def __init__(self, gctx):
# TODO : let globally ?
plasma.lib.colors.ctx = self
self.gctx = gctx # Global context
self.entry = 0
self.addr_color = {}
self.color_counter = 112
self.seen = set()
# If an address of an instruction cmp is here, it means that we
# have fused with an if, so don't print this instruction.
self.all_fused_inst = set()
self.is_dump = False
self.gph = None
self.ast = None
def init_address(self, entry, quiet=False):
if isinstance(entry, int):
self.entry = entry
return True
if entry == "EP":
self.entry = self.gctx.dis.binary.get_entry_point()
return True
if entry is None:
if self.gctx.raw_type is not None:
self.entry = 0
return True
self.entry = self.gctx.db.symbols.get("main", None) or \
self.gctx.db.symbols.get("_main", None) or \
self.gctx.dis.binary.get_entry_point()
if self.entry is None:
error("symbol main or _main not found, try with EP")
if self.gctx.interactive_mode:
return False
die()
return True
is_hexa = entry.startswith("0x")
if not is_hexa and self.gctx.api.is_reserved_prefix(entry):
entry = entry[entry.index("_") + 1:]
is_hexa = True
if is_hexa:
try:
self.entry = int(entry, 16)
except:
if not quiet:
error("bad hexa string %s" % entry)
if self.gctx.interactive_mode:
return False
die()
return True
self.entry = self.gctx.db.demangled.get(entry, None) or \
self.gctx.db.symbols.get(entry, None) or \
self.gctx.dis.binary.section_names.get(entry, None)
if self.entry is None:
if not quiet:
error("symbol %s not found" % entry)
if self.gctx.interactive_mode:
return False
die()
return True
def decompile(self):
self.is_dump = False
self.gph, pe_nb_new_syms = self.gctx.dis.get_graph(self.entry)
if self.gph is None:
error("capstone can't disassemble here")
return None
self.gph.simplify()
if self.gctx.db.loaded and pe_nb_new_syms:
self.gctx.db.modified = True
try:
self.gph.loop_detection(self.entry)
ast, correctly_ended = generate_ast(self)
if not correctly_ended:
debug__("Second try...")
self.gph.loop_detection(self.entry, True)
ast, _ = generate_ast(self)
self.ast = ast
except ExcIfelse as e:
error("can't have a ifelse here %x" % e.addr)
if self.gctx.interactive_mode:
return None
die()
o = self.gctx.libarch.output.Output(self)
o._ast(self.entry, ast)
self.output = o
return o
def dump_asm(self, lines=NB_LINES_TO_DISASM, until=-1):
self.is_dump = True
o = self.gctx.dis.dump_asm(self, lines=lines, until=until)
self.output = o
return o
def dump_xrefs(self):
self.is_dump = True
o = self.gctx.dis.dump_xrefs(self, self.entry)
self.output = o
return o
| chubbymaggie/reverse | plasma/lib/__init__.py | Python | gpl-3.0 | 12,594 |
import random
import discord
async def choose(cmd, message, args):
if args:
choice = random.choice(args)
embed = discord.Embed(color=0x1ABC9C, title=':thinking: I choose... ' + choice)
await message.channel.send(None, embed=embed)
else:
await message.channel.send(cmd.help())
return
| valeth/apex-sigma | sigma/plugins/utility/other/choose.py | Python | gpl-3.0 | 336 |
from django.conf import settings
from django.db import models
class UserRole(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name='urole')
role = models.ForeignKey('arctic.Role')
class Meta:
swappable = 'ARCTIC_USER_ROLE_MODEL'
class Role(models.Model):
name = models.CharField('Role', max_length=100, unique=True)
is_active = models.BooleanField(default=True)
def __str__(self):
return self.name
class Meta:
swappable = 'ARCTIC_ROLE_MODEL'
| ddaan/django-arctic | arctic/models.py | Python | mit | 529 |
#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import logging
import sys
import gold_inexact_matching.base_parameter_optimizer as base_optimizer
import gold_inexact_matching.binary_search_parameter_optimizer\
as binary_optimizer
import gold_inexact_matching.brute_force_parameter_optimizer as brute_optimizer
import gold_inexact_matching.local_minima_parameter_optimizer\
as local_optimizer
from gold_inexact_matching import optimizer_set
# Script to find suitable values for Skia Gold inexact matching.
#
# Inexact matching in Skia Gold has three tunable parameters:
# 1. The max number of differing pixels.
# 2. The max delta for any single pixel.
# 3. The threshold for a Sobel filter.
#
# Ideally, we use the following hierarchy of comparison approaches:
# 1. Exact matching.
# 2. Exact matching after a Sobel filter is applied.
# 3. Fuzzy matching after a Sobel filter is applied.
#
# However, there may be cases where only using a Sobel filter requires masking a
# very large amount of the image compared to Sobel + very conservative fuzzy
# matching.
#
# Even if such cases are not hit, the process of determining good values for the
# parameters is quite tedious since it requires downloading images from Gold and
# manually running multiple calls to `goldctl match`.
#
# This script attempts to remedy both issues by handling all of the trial and
# error and suggesting potential parameter values for the user to choose from.
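# A hypothetical invocation (each subcommand's flags come from its optimizer's
# AddArguments, so the exact options may differ):
#   vpython3 determine_gold_inexact_parameters.py -v local_minima <optimizer args>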
def CreateArgumentParser():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
script_parser = parser.add_argument_group('Script Arguments')
script_parser.add_argument('-v',
'--verbose',
dest='verbose_count',
default=0,
action='count',
                             help='Verbose level (multiple times for more)')
subparsers = parser.add_subparsers(help='Optimization algorithm')
binary_parser = subparsers.add_parser(
'binary_search',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Perform a binary search to optimize a single parameter. The best '
'option if you only want to tune one parameter.')
binary_parser.set_defaults(
clazz=binary_optimizer.BinarySearchParameterOptimizer)
binary_optimizer.BinarySearchParameterOptimizer.AddArguments(binary_parser)
local_parser = subparsers.add_parser(
'local_minima',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Perform a BFS to find local minima using weights for each '
'parameter. Slower than binary searching, but supports an arbitrary '
'number of parameters.')
local_parser.set_defaults(clazz=local_optimizer.LocalMinimaParameterOptimizer)
local_optimizer.LocalMinimaParameterOptimizer.AddArguments(local_parser)
brute_parser = subparsers.add_parser(
'brute_force',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
help='Brute force all possible combinations. VERY, VERY slow, but can '
'potentially find better values than local_minima.')
brute_parser.set_defaults(clazz=brute_optimizer.BruteForceParameterOptimizer)
brute_optimizer.BruteForceParameterOptimizer.AddArguments(brute_parser)
return parser
def SetLoggingVerbosity(args):
logger = logging.getLogger()
if args.verbose_count == 0:
logger.setLevel(logging.WARNING)
elif args.verbose_count == 1:
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.DEBUG)
def main():
parser = CreateArgumentParser()
args = parser.parse_args()
SetLoggingVerbosity(args)
optimizer = optimizer_set.OptimizerSet(args, args.clazz)
optimizer.RunOptimization()
return 0
if __name__ == '__main__':
sys.exit(main())
| ric2b/Vivaldi-browser | chromium/content/test/gpu/determine_gold_inexact_parameters.py | Python | bsd-3-clause | 4,018 |
# Copyright 2017, IBM US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
import logging
from openstack_dashboard.api import keystone
LOG = logging.getLogger(__name__)
class Backups(horizon.Panel):
name = _("Backups")
slug = 'backups'
def can_access(self, context):
if keystone.is_multi_domain_enabled() \
and not keystone.is_domain_admin(context['request']):
return False
return super(Backups, self).can_access(context)
@staticmethod
def can_register():
return keystone.VERSIONS.active >= 3
| open-power-ref-design-toolkit/os-services | osa/dbaas_ui/dbaas_ui/backups/panel.py | Python | apache-2.0 | 1,145 |
# Copyright 2013 Rackspace Australia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import voluptuous as v
from zuul.reporter import BaseReporter
class GerritReporter(BaseReporter):
"""Sends off reports to Gerrit."""
name = 'gerrit'
log = logging.getLogger("zuul.reporter.gerrit.Reporter")
def report(self, source, pipeline, item):
"""Send a message to gerrit."""
message = self._formatItemReport(pipeline, item)
self.log.debug("Report change %s, params %s, message: %s" %
(item.change, self.reporter_config, message))
changeid = '%s,%s' % (item.change.number, item.change.patchset)
item.change._ref_sha = source.getRefSha(
item.change.project.name, 'refs/heads/' + item.change.branch)
return self.connection.review(item.change.project.name, changeid,
message, self.reporter_config)
def getSubmitAllowNeeds(self):
"""Get a list of code review labels that are allowed to be
"needed" in the submit records for a change, with respect
to this queue. In other words, the list of review labels
this reporter itself is likely to set before submitting.
"""
return self.reporter_config
def getSchema():
gerrit_reporter = v.Any(str, v.Schema({}, extra=True))
return gerrit_reporter
| wikimedia/integration-zuul | zuul/reporter/gerrit.py | Python | apache-2.0 | 1,897 |
# Copyright 2019 Verily Life Sciences LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for model lib."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from classifaedes import hparams_lib
from classifaedes import model_lib
import tensorflow.compat.v1 as tf
from tensorflow.contrib import learn as contrib_learn
@parameterized.named_parameters(
('Defaults', hparams_lib.defaults()),
('InceptionV3b', hparams_lib.defaults().override_from_dict({
'arch': 'inception_v3b'
})),
)
class ModelLibTest(tf.test.TestCase):
def setUp(self):
super(ModelLibTest, self).setUp()
self._input_md = {
'num_examples_negative': 182580,
'num_examples_positive': 2050118,
}
def _input_fn(self, batch_size):
targets = tf.random_uniform([batch_size], dtype=tf.int32, maxval=2, seed=1)
images = tf.random_uniform([batch_size, 120, 130, 1])
return {'images': images}, tf.equal(1, targets)
def testBuildTrainGraph(self, hps):
batch_size = hps.batch_size
with tf.Graph().as_default():
inputs, targets = self._input_fn(batch_size)
model_fn = model_lib.build_model_fn(hps, self._input_md)
probabilities, loss, train_op = model_fn(inputs, targets,
contrib_learn.ModeKeys.TRAIN)
self.assertEqual(probabilities['outputs'].dtype, tf.float32)
self.assertEqual(loss.dtype, tf.float32)
self.assertIsNotNone(train_op)
self.assertEqual(probabilities['outputs'].shape, [batch_size])
self.assertEqual(loss.shape, [])
def testBuildEvalGraph(self, hps):
batch_size = hps.batch_size
with tf.Graph().as_default():
inputs, targets = self._input_fn(batch_size)
model_fn = model_lib.build_model_fn(hps, self._input_md)
probabilities, loss, train_op = model_fn(inputs, targets,
contrib_learn.ModeKeys.EVAL)
self.assertEqual(probabilities['outputs'].dtype, tf.float32)
self.assertEqual(loss.dtype, tf.float32)
self.assertIsNone(train_op)
self.assertEqual(probabilities['outputs'].shape, [batch_size])
self.assertEqual(loss.shape, [])
def testRunTrainGraph(self, hps):
with self.test_session() as sess:
inputs, targets = self._input_fn(hps.batch_size)
model_fn = model_lib.build_model_fn(hps, self._input_md)
probabilities_tensor, loss_tensor, train_op = model_fn(
inputs, targets, contrib_learn.ModeKeys.TRAIN)
tf.global_variables_initializer().run()
sess.run(train_op)
sess.run([probabilities_tensor, loss_tensor])
def testRunEvalGraph(self, hps):
with self.test_session() as sess:
inputs, targets = self._input_fn(hps.batch_size)
model_fn = model_lib.build_model_fn(hps, self._input_md)
probabilities_tensor, loss_tensor, _ = model_fn(
inputs, targets, contrib_learn.ModeKeys.EVAL)
tf.global_variables_initializer().run()
sess.run([probabilities_tensor, loss_tensor])
if __name__ == '__main__':
tf.test.main()
| verilylifesciences/classifaedes | classifaedes/model_lib_test.py | Python | apache-2.0 | 3,651 |
#########################################################################
# This script is provided for
#
# Chen W and De Schutter E (2017) Parallel STEPS: Large Scale Stochastic Spatial Reaction-Diffusion Simulation with High Performance Computers. Front. Neuroinform. 11:13. doi: 10.3389/fninf.2017.00013
#
##########################################################################
import steps.interface
from steps.geom import *
import os
from subprocess import call
os.makedirs("meshes/partition", exist_ok=True)
MESH_FILE = "meshes/fullcell.inp"
mesh = TetMesh.LoadAbaqus(MESH_FILE, scale=1e-06)
mesh.ConvertToMetis('meshes/partition/fullcell.metis')
print("Generate partition for desktop computer (from 2 cores to 10 cores)")
for i in range(2, 11, 2):
call(['mpmetis', '-ncommon=3', '-minconn', '-niter=1000', 'meshes/partition/fullcell.metis', '%i' % (i)])
metis_part = MetisPartition(mesh, 'meshes/partition/fullcell.metis.epart.%i' % (i), default_tris=mesh.surface)
metis_part.printStats()
print("Generate partition for supercomputer (from 2100 cores to 5000 cores)")
for i in range(2100, 5001, 100):
call(['mpmetis', '-ncommon=3', '-minconn', '-niter=1000', 'meshes/partition/fullcell.metis', '%i' % (i)])
metis_part = MetisPartition(mesh, 'meshes/partition/fullcell.metis.epart.%i' % (i), default_tris=mesh.surface)
metis_part.printStats()
| CNS-OIST/STEPS_Example | publication_models/API_2/Chen_FNeuroinf__2017/purkinje_model/fullcell_partition.py | Python | gpl-2.0 | 1,384 |
# -*- coding: gb2312 -*-
import sys
import re
import os
import string
import signal
import time
import codecs
import json
from ExcelTool import ExcelTool
from config import *
import functions
try:
import character
except:
character = functions
import xlsxtool
import xlsxError
import copy
SYS_CODE = sys.getdefaultencoding()
def siginit(sigNum, sigHandler):
print("byebye")
sys.exit(1)
signal.signal(signal.SIGINT, siginit) # Ctrl-C handler
def hasFunc(funcName):
return hasattr(character, funcName) or hasattr(functions, funcName)
def getFunc(funcName):
if hasattr(character, funcName):
return getattr(character, funcName)
return getattr(functions, funcName)
g_dctDatas = {}
g_fdatas = {}
class xlsx2py(object):
"""
	Export Excel data to a py file; the process requires encoding conversion.
"""
def __init__(self, infile, outfile):
		sys.excepthook = xlsxError.except_hook # custom traceback handling (originally to print messages in Chinese)
		self.infile = os.path.abspath(infile)	# the excel file name
		self.outfile = os.path.abspath(outfile)	# the output data file name
return
def __initXlsx(self):
self.xbook = ExcelTool(self.infile)
while not self.xbook.getWorkbook(forcedClose = True):
xlsxtool.exportMenu(EXPORT_INFO_RTEXCEL, OCallback = self.resetXlsx)
def resetXlsx(self):
"""
ÊäÈëO(other)µÄ»Øµ÷
¹Ø±ÕÒÑ´ò¿ªµÄexcel£¬È»ºóÖØÐ´ò¿ª
"""
self.xbook.getWorkbook(forcedClose = True)
def __initInfo(self):
		self.__exportSheetIndex = []	# indices of the sheets that can be exported
		self.headerDict = {}	# first row of each export sheet, converted to a dict
		self.mapDict = {}	# dict built from the mapping sheet (its first row is a description and is ignored)
	##################### main flow ##########################
def run(self):
"""
		Columns flagged with $ need the mapping sheet, so build the mapping dict first.
"""
		self.__initXlsx()	# set up the excel workbook
		self.__initInfo()	# set up export bookkeeping
		self.openFile()
		self.sth4Nth()	# move on to the next stage
		self.constructMapDict()	# build the mapping dict
self.__onRun()
def __onRun(self):
		self.writeLines = 0	# number of excel rows written so far
		self.parseDefineLine()	# parse the sheets
	############### find the mapping sheet and mark the sheets to export ##################
def sth4Nth(self):
"""
		something for nothing: both the mapping sheet and the export sheets must exist
"""
for index in range(1, self.xbook.getSheetCount() + 1):
sheetName = self.xbook.getSheetNameByIndex(index)
if sheetName == EXPORT_MAP_SHEET:
self.__onFindMapSheet(index)
if sheetName.startswith(EXPORT_PREFIX_CHAR):
self.__onFindExportSheet(index)
self.onSth4Nth()
def onSth4Nth(self):
"""
"""
if not hasattr(self, 'mapIndex'):
self.xlsxClear(EXPORT_ERROR_NOMAP)
if len(self.__exportSheetIndex) == 0:
xlsxError.error_input(EXPORT_ERROR_NOSHEET)
return
def __onFindMapSheet(self, mapIndex):
self.mapIndex = mapIndex
return
def __onFindExportSheet(self, Eindex):
"""
		Done.
"""
self.__exportSheetIndex.append(Eindex)
def constructMapDict(self):
"""
		Build the mapping dict; there is only one mapping sheet.
"""
mapDict = {}
sheet = self.xbook.getSheetByIndex(self.mapIndex)
if not sheet:
return
for col in range(0, self.xbook.getRowCount(self.mapIndex)):
colValues = self.xbook.getColValues(sheet, col)
if colValues:
for v in [e for e in colValues[1:] if e[0] and isinstance(e[0], str) and e[0].strip()]:
print (v)
					mapStr = v[0].replace('：', ":")	# normalize the full-width Chinese colon to ":"
try:
k, v = mapStr.split(":")
k = str.strip(k)
v = str.strip(v)
mapDict[k] = v
except Exception as errstr:
						print( "warning: please check the mapping sheet, column %d, err=%s" % (col, errstr))
self.__onConstruct(mapDict)
return
def __onConstruct(self, mapDict):
"""
		The mapping dict has been built.
"""
self.mapDict = mapDict
return
	##################### header checks #######################
def parseDefineLine(self):
		self.__checkDefine()	# check that the header definitions are correct
		self.__checkData()	# check that the data follows the rules
def __reCheck(self, head):
pattern = "(\w+)(\[.*])(\[\w+\])"
reGroups =re.compile(pattern).match(head)
if not reGroups:
return ()
return reGroups.groups()
def __convertKeyName(self, name):
try:
tname = eval(name)
except:
pass
else:
if type(tname) == int or type(tname) == float:
return tname
return name
def __checkDefine(self):
"""
		Check that each cell in the first row matches the definition format "name[signs][func]" and that the keys meet the rules.
"""
		print( "Checking that the file header (first row) is correct" )
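		# Illustrative only (hypothetical names): a header cell such as "id[!][readInt]"
		# would declare a column named "id" that is the key ("!") and whose values are
		# converted by a function readInt defined in functions.py / character.py.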
for index in self.__exportSheetIndex:
self.sheetKeys = []
headList = self.xbook.getRowValues(self.xbook.getSheetByIndex(index), EXPORT_DEFINE_ROW -1 )
			enName = []	# temp list used to detect duplicate names
reTuples = []
self.headerDict[index] = {}
for c, head in enumerate(headList):
				if head is None or head.strip() == '':	# if a first-row cell is empty, the whole column is ignored
self.__onCheckSheetHeader(self.headerDict[index], c, None)
continue
reTuple = self.__reCheck(head)
				if len(reTuple) == 3:	# the definition splits into three parts: name, signs, func; signs may be empty
name, signs, funcName = reTuple[0], reTuple[1][1:-1], reTuple[2][1:-1]
name = self.__convertKeyName(name)
					for s in signs:	# is every sign among the allowed signs?
if s not in EXPORT_ALL_SIGNS:
self.xlsxClear(EXPORT_ERROR_NOSIGN, (EXPORT_DEFINE_ROW, c+1))
					if EXPORT_SIGN_GTH in signs:	# is this column a key?
self.sheetKeys.append(c)
						if len(self.sheetKeys) > EXPORT_KEY_NUMS:	# do the keys exceed the allowed count?
self.xlsxClear(EXPORT_ERROR_NUMKEY, (EXPORT_DEFINE_ROW, c+1))
					if name not in enName:	# names must not repeat
enName.append(name)
else:
self.xlsxClear(EXPORT_ERROR_REPEAT, \
(self.xbook.getSheetNameByIndex(index).encode(FILE_CODE), EXPORT_DEFINE_ROW, c+1))
					if not hasFunc(funcName):	# does funcName exist?
self.xlsxClear(EXPORT_ERROR_NOFUNC, (xlsxtool.toGBK(funcName), c+1))
else:
self.xlsxClear(EXPORT_ERROR_HEADER, (self.xbook.getSheetNameByIndex(index).encode(FILE_CODE), EXPORT_DEFINE_ROW, c+1))
				self.__onCheckSheetHeader(self.headerDict[index], c, (name, signs, funcName))	# the definition row is used often, so cache it
self.__onCheckDefine()
return
def __onCheckSheetHeader(self, DataDict, col, headerInfo):
DataDict[col] = headerInfo
def __onCheckDefine(self):
		if len(self.sheetKeys) != EXPORT_KEY_NUMS:	# too few keys is an error as well
			self.xlsxClear(EXPORT_ERROR_NOKEY, ("need %d but only have %d"%(EXPORT_KEY_NUMS,len(self.sheetKeys))))
		print( "File header check passed", time.ctime(time.time()) )
def sheetIndex2Data(self):
self.sheet2Data = {}
for index in self.__exportSheetIndex:
SheetName = self.xbook.getSheetNameByIndex(index)
sheetName = SheetName[SheetName.find(EXPORT_PREFIX_CHAR)+1:]
if sheetName in self.mapDict:
dataName = self.mapDict[sheetName]
if dataName in self.sheet2Data:
self.sheet2Data[dataName].append(index)
else:
self.sheet2Data[dataName] = [index]
def __checkData(self):
"""
		Check that column data follows the naming rules, and build the required dicts.
"""
self.sheetIndex2Data()
self.dctDatas = g_dctDatas
self.hasExportedSheet = []
for dataName, indexList in self.sheet2Data.items():
self.curIndexMax = len(indexList)
self.curProIndex = []
for index in indexList:
sheet = self.xbook.getSheetByIndex(index)
self.curProIndex.append(index)
cols = self.xbook.getRowCount(index)
rows = self.xbook.getColCount(index)
if dataName not in self.dctDatas:
self.dctDatas[dataName] = {}
self.dctData = self.dctDatas[dataName]
for row in range(3, rows + 1):
rowval = self.xbook.getRowValues(sheet, row - 1)
childDict = {}
for col in range(1, cols + 1):
val = rowval[col - 1]
if val != None:
val = (str(rowval[col - 1]),)
else:
val = ("",)
#val = (self.xbook.getText(sheet, row, col),)
if self.headerDict[index][col-1] is None:
continue
name, sign, funcName = self.headerDict[index][col-1]
if '$' in sign and len(val[0]) > 0:
self.needReplace({'v':val[0], "pos":(row, col)})
							v = self.mapDict[xlsxtool.GTOUC(xlsxtool.val2Str(val[0]))]	# mapDict keys are unicode, so convert the lookup key to unicode
else:
v = val[0]
if EXPORT_SIGN_DOT in sign and v is None:
self.xlsxClear(EXPORT_ERROR_NOTNULL, (col, row))
try:
sv = v#xlsxtool.toGBK(v)
except:
sv = v
func = getFunc(funcName)
try:
v = func(self.mapDict, self.dctData, childDict, sv)
except Exception as errstr:
self.xlsxClear(EXPORT_ERROR_FUNC, (errstr, funcName, sv, row, col))
for ss in sign.replace('$',''):
EXPORT_SIGN[ss](self,{"v":v,"pos":(row, col)})
#if isinstance(v, (isinstance, unicode)):
# try:
# v = v.decode("gb2312").encode("utf-8")
# except:
# pass
childDict[name] = v
					print( "progress: %i/%i" % (row, rows) )
self.dctData[self.tempKeys[-1]] = copy.deepcopy(childDict)
self.writeHead()
overFunc = self.mapDict.get('overFunc')
if overFunc is not None:
func = getFunc(overFunc)
self.dctData = func(self.mapDict, self.dctDatas, self.dctData, dataName)
self.dctDatas[dataName] = self.dctData
g_dctDatas.update(self.dctDatas)
self.__onCheckSheet()
self.__onCheckData()
self.writeFoot()
def __onCheckSheet(self):
if hasattr(self, "tempKeys"):
del self.tempKeys
return
def __onCheckData(self):
self.exportSheet()
	############## handlers referenced by the EXPORT_SIGN dict ###################
def isNotEmpty(self, cellData):
if cellData['v'] is None:
self.xlsxClear(EXPORT_ERROR_NOTNULL, (cellData['pos'], ))
	def needReplace(self, cellData):
		"""macro substitution check"""
v = cellData["v"].strip()
		if isinstance(v, float):	# guard against numeric cells (mapDict keys are strings)
v = str(int(v))
		if v not in self.mapDict:	# only check here, do not replace
self.xlsxClear(EXPORT_ERROR_NOTMAP, (cellData['pos'], v))
def isKey(self, cellData):
if not hasattr(self, "tempKeys"):
self.tempKeys = []
if cellData['v'] not in self.tempKeys:
self.tempKeys.append(cellData['v'])
else:
self.xlsxClear(EXPORT_ERROR_REPKEY, (cellData['pos'], \
(self.tempKeys.index(cellData['v'])+3, cellData['pos'][1] ), cellData['v']) )
	############### export-to-py section ######################
def exportSheet(self):
"""
		Export the collected sheets.
"""
self.__onExportSheet()
return
def __onExportSheet(self):
"""
		Convert the data into a py file.
"""
self.writeXLSX2PY()
return
def openFile(self):
"""
		Create the output directory and open the output file.
"""
dirPath = os.path.split(self.outfile)[0]
if not os.path.isdir(dirPath):
try:
xlsxtool.createDir(dirPath)
except:
self.xlsxClear(EXPORT_ERROR_CPATH, (dirPath, ))
try:
fileHandler = codecs.open(self.outfile, "w+",'utf-8')
#fileHandler = open(self.outfile, "w+")
except:
self.xlsxClear(EXPORT_ERROR_FILEOPEN, (self.outfile, ))
		self.__onOpenFile(fileHandler) # directory created and file opened successfully
return
def __onOpenFile(self, fileHandler):
"""
		The py file is now open and ready for writing.
"""
self.fileName = self.outfile
self.fileHandler = fileHandler
del self.outfile
def xlsxWrite(self, stream):
"""
		Write the stream into the data file.
"""
if not hasattr(self, "fileHandler"):
self.xlsxClear(EXPORT_ERROR_FILEOPEN, ())
try:
self.fileHandler.write(stream)
except Exception as errstr:
self.xlsxClear(EXPORT_ERROR_IOOP, (errstr))
def writeXLSX2PY(self):
"""
		Write the leading lines of the output file.
"""
self.writeBody()
return
def writeHead(self):
		print( "Start writing file:", time.ctime(time.time()) )
try:
SheetName = self.xbook.getSheetNameByIndex(self.curProIndex[-1])
except:
			print( "Failed to get the sheet name" )
sheetName = SheetName[SheetName.find(EXPORT_PREFIX_CHAR)+1:]
if sheetName in self.mapDict:
dataName = self.mapDict[sheetName]
self.hasExportedSheet.append(self.curProIndex[-1])
else:
self.xlsxClear(2, (sheetName.encode(FILE_CODE),))
stream = ""
dataFileInfo = (self.infile + '.' + SheetName).encode("UTF-8")
if len(self.hasExportedSheet) <= 1:
stream = EXPORT_DATA_HEAD
globalDefs = self.mapDict.get('globalDefs', '')
if len(globalDefs) > 0:
func = getFunc(globalDefs)
globalDefs = func(self.dctData)
if len(globalDefs) > 0:
globalDefs += "\n"
if "globalDefs" in g_fdatas:
g_fdatas["globalDefs"] += globalDefs
else:
g_fdatas["globalDefs"] = globalDefs
def writeBody(self):
#for index in self.curProIndex:
# xlsxError.info_input(EXPORT_INFO_ING, (self.xbook.getSheetNameByIndex(index).encode(FILE_CODE), ))
self.xlsxWrite(EXPORT_DATA_HEAD)
if "globalDefs" in g_fdatas:
self.xlsxWrite(g_fdatas["globalDefs"])
for dataName, datas in g_dctDatas.items():
stream = dataName + "="
#stream += xlsxtool.dict_to_text(datas) + "\n"
stream += "%s\n" % (datas)
self.xlsxWrite(stream)
jsonhandle = codecs.open(self.fileHandler.stream.name + "." + dataName + ".json", "w+",'utf-8')
s = json.dumps(datas)
jsonhandle.write("{%s}" % (s[1:-1]))
jsonhandle.close()
def writeFoot(self):
"""
		Write the file footer.
"""
if len(self.hasExportedSheet) < len(self.__exportSheetIndex):
return
allDataDefs = self.mapDict.get('allDataDefs', '')
if len(allDataDefs) > 0:
func = getFunc(allDataDefs)
allDataDefs = func(self.dctData)
if "allDataDefs" in g_fdatas:
g_fdatas["allDataDefs"] += allDataDefs
else:
g_fdatas["allDataDefs"] = allDataDefs
stream = "\nallDatas = {\n"
for dataName, indexList in self.sheet2Data.items():
for index in indexList:
SheetName = self.xbook.getSheetNameByIndex(index)
sheetName = SheetName[SheetName.find(EXPORT_PREFIX_CHAR)+1:]
stream += "\t'" + sheetName
stream += "':"
stream += dataName
stream += ",\n"
if len(allDataDefs) > 0:
stream += "\t" + g_fdatas["allDataDefs"] + ",\n"
stream +="}"
self.xlsxWrite(stream)
self.xlsxbyebye()
		print( "Finished writing, time:", time.ctime(time.time()) )
	############## misc ##################
def xlsxClose(self):
"""
		Close the output file and the workbook.
"""
if hasattr(self, "fileHandler"):
self.fileHandler.close()
self.xbook.close()
return
def xlsxClear(self, errno = 0, msg = ''):
"""
		Clean up the open Excel workbook when the program exits abnormally.
"""
self.xlsxClose()
if errno > 0:
raise xlsxError.xe(errno, msg)
else:
sys.exit(1)
def xlsxbyebye(self):
"""
		Normal exit.
"""
self.xlsxClose()
return
def getSheetsCounts(self):
return reduce(lambda x,y:x+y, \
[self.xbook.getColCount(index) for index in self.__exportSheetIndex])
EXPORT_SIGN['.'] = xlsx2py.isNotEmpty
EXPORT_SIGN['$'] = xlsx2py.needReplace
EXPORT_SIGN['!'] = xlsx2py.isKey
def main():
"""
	Usage:
	python xlsx2py.py data.py excelName.xls(x) [more excel files...]
"""
try:
outfile = sys.argv[1]
except:
print( main.__doc__ )
return
for infile in sys.argv[2:]:
		print( "Start exporting: [%s] max=%i" % (infile, len(sys.argv[2:])) )
if os.path.isfile(infile):
a = xlsx2py(infile, outfile)
xlsxtool.exportMenu(EXPORT_INFO_OK)
a.run()
else:
xlsxError.error_input(EXPORT_ERROR_NOEXISTFILE, (infile,))
print( '-------------------------------THE END------------------------------------------------' )
sys.exit()
if __name__ == '__main__':
main()
| aabbox/kbengine | kbe/tools/xlsx2py/xlsx2py/xlsx2py.py | Python | lgpl-3.0 | 15,491 |
from ..renderer.rst.doxygen.base import RenderContext
from ..renderer.rst.doxygen import format_parser_error
from ..renderer.rst.doxygen.mask import NullMaskFactory
from ..parser import ParserError, FileIOError
from ..project import ProjectError
from ..finder.core import NoMatchesError
from docutils.parsers import rst
from docutils.parsers.rst.directives import unchanged_required, flag
from docutils import nodes
class WarningHandler(object):
def __init__(self, state, context):
self.state = state
self.context = context
def warn(self, raw_text, rendered_nodes=None):
raw_text = self.format(raw_text)
if rendered_nodes is None:
rendered_nodes = [nodes.paragraph("", "", nodes.Text(raw_text))]
return [
nodes.warning("", *rendered_nodes),
self.state.document.reporter.warning(raw_text, line=self.context['lineno'])
]
def format(self, text):
return text.format(**self.context)
def create_warning(project_info, state, lineno, **kwargs):
tail = ''
if project_info:
tail = 'in doxygen xml output for project "{project}" from directory: {path}'.format(
project=project_info.name(),
path=project_info.project_path()
)
context = dict(
lineno=lineno,
tail=tail,
**kwargs
)
return WarningHandler(state, context)
class BaseDirective(rst.Directive):
def __init__(self, root_data_object, renderer_factory_creator_constructor, finder_factory,
matcher_factory, project_info_factory, filter_factory, target_handler_factory,
*args):
rst.Directive.__init__(self, *args)
self.root_data_object = root_data_object
self.renderer_factory_creator_constructor = renderer_factory_creator_constructor
self.finder_factory = finder_factory
self.matcher_factory = matcher_factory
self.project_info_factory = project_info_factory
self.filter_factory = filter_factory
self.target_handler_factory = target_handler_factory
def render(self, data_object, project_info, options, filter_, target_handler, mask_factory):
"Standard render process used by subclasses"
renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
project_info,
self.state.document,
options,
target_handler
)
try:
renderer_factory = renderer_factory_creator.create_factory(
data_object,
self.state,
self.state.document,
filter_,
target_handler,
)
except ParserError as e:
return format_parser_error("doxygenclass", e.error, e.filename, self.state,
self.lineno, True)
except FileIOError as e:
return format_parser_error("doxygenclass", e.error, e.filename, self.state, self.lineno)
context = RenderContext([data_object, self.root_data_object], mask_factory)
object_renderer = renderer_factory.create_renderer(context)
node_list = object_renderer.render()
return node_list
class DoxygenBaseDirective(BaseDirective):
required_arguments = 1
optional_arguments = 1
option_spec = {
"path": unchanged_required,
"project": unchanged_required,
"outline": flag,
"no-link": flag,
}
has_content = False
def run(self):
try:
namespace, name = self.arguments[0].rsplit("::", 1)
except ValueError:
namespace, name = "", self.arguments[0]
try:
project_info = self.project_info_factory.create_project_info(self.options)
except ProjectError as e:
warning = create_warning(None, self.state, self.lineno)
return warning.warn('doxygen%s: %s' % (self.kind, e))
finder = self.finder_factory.create_finder(project_info)
matcher_stack = self.create_matcher_stack(namespace, name)
try:
data_object = finder.find_one(matcher_stack)
except NoMatchesError as e:
display_name = "%s::%s" % (namespace, name) if namespace else name
warning = create_warning(project_info, self.state, self.lineno, name=display_name,
kind=self.kind)
return warning.warn('doxygen{kind}: Cannot find {kind} "{name}" {tail}')
target_handler = self.target_handler_factory.create_target_handler(
self.options, project_info, self.state.document)
filter_ = self.filter_factory.create_outline_filter(self.options)
mask_factory = NullMaskFactory()
return self.render(data_object, project_info, self.options, filter_, target_handler,
mask_factory)
| losalamos/PowerParser | docs/breathe-3.2.0/breathe/directive/base.py | Python | apache-2.0 | 4,942 |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# A collection of fake models used for unit testing
class FakeTI:
def __init__(self, **kwds):
self.__dict__.update(kwds)
def pool_full(self):
# Allow users of this fake to set pool_filled in the constructor to make this
# return True
try:
return self.pool_filled
except AttributeError:
# If pool_filled was not set default to false
return False
def get_dagrun(self, _):
return self.dagrun
def are_dependents_done(self, session): # pylint: disable=unused-argument
return self.dependents_done
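# Illustrative usage: FakeTI(pool_filled=True).pool_full() returns True,
# FakeTI().pool_full() falls back to False, and
# FakeTI(dependents_done=True).are_dependents_done(None) returns True.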
class FakeTask:
def __init__(self, **kwds):
self.__dict__.update(kwds)
class FakeDag:
def __init__(self, **kwds):
self.__dict__.update(kwds)
def get_running_dagruns(self, _):
return self.running_dagruns
class FakeContext:
def __init__(self, **kwds):
self.__dict__.update(kwds)
| Fokko/incubator-airflow | tests/ti_deps/deps/fake_models.py | Python | apache-2.0 | 1,753 |
# -*- coding: utf-8 -*-
'''Pythonic wrappers for AACGM-V2 C functions.
Functions
--------------
convert_latlon
convert_latlon_arr
get_aacgm_coord
get_aacgm_coord_arr
convert_str_to_bit
--------------
'''
from __future__ import division, print_function, absolute_import
from __future__ import unicode_literals
import numpy as np
import datetime as dt
import logging
def convert_latlon(in_lat, in_lon, height, dtime, code="G2A", igrf_file=None,
coeff_prefix=None):
'''Converts between geomagnetic coordinates and AACGM coordinates
Parameters
------------
in_lat : (float)
Input latitude in degrees N (code specifies type of latitude)
in_lon : (float)
Input longitude in degrees E (code specifies type of longitude)
height : (float)
Altitude above the surface of the earth in km
dtime : (datetime)
Datetime for magnetic field
code : Optional[str]
String denoting which type(s) of conversion to perform
G2A - geographic (geodetic) to AACGM-v2
A2G - AACGM-v2 to geographic (geodetic)
TRACE - use field-line tracing, not coefficients
ALLOWTRACE - use trace only above 2000 km
BADIDEA - use coefficients above 2000 km
GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
(default is "G2A")
igrf_file : Optional[str]
Full filename of IGRF coefficient file or None to use
rcParams["IGRF_DAVITPY_COEFF_FILE"]. (default=None)
coeff_prefix : Optional[str]
Location and file prefix for aacgm coefficient files or None to use
rcParams["AACGM_DAVITPY_DAT_PREFEX"]. (default=None)
Returns
-------
out_lat : (float)
Output latitude in degrees
out_lon : (float)
Output longitude in degrees
out_r : (float)
Geocentric radial distance in R
'''
from davitpy import rcParams
from davitpy.models import aacgm
# Define coefficient file prefix if not supplied
if coeff_prefix is None:
coeff_prefix = rcParams['AACGM_DAVITPY_DAT_PREFIX']
# Define IGRF file if not supplied
if igrf_file is None:
igrf_file = rcParams['IGRF_DAVITPY_COEFF_FILE']
# Test time
    if isinstance(dtime, dt.date) and not isinstance(dtime, dt.datetime):
        dtime = dt.datetime.combine(dtime, dt.time(0))
assert isinstance(dtime, dt.datetime), \
logging.error('time must be specified as datetime object')
# Test height
if height < 0:
logging.warn('conversion not intended for altitudes < 0 km')
# Test code
code = code.upper()
    if(height > 2000 and code.find("TRACE") < 0 and
       code.find("ALLOWTRACE") < 0 and code.find("BADIDEA") < 0):
estr = 'coefficients are not valid for altitudes above 2000 km. You '
estr += 'must either use field-line tracing (trace=True '
estr += 'or allowtrace=True) or indicate you know this '
estr += 'is a bad idea'
logging.error(estr)
# Test latitude range
if abs(in_lat) > 90.0:
assert abs(in_lat) <= 90.1, logging.error('unrealistic latitude')
in_lat = np.sign(in_lat) * 90.0
# Constrain longitudes between -180 and 180
in_lon = ((in_lon + 180.0) % 360.0) - 180.0
# Set current date and time
aacgm.set_datetime(dtime.year, dtime.month, dtime.day, dtime.hour,
dtime.minute, dtime.second, coeff_prefix)
# make flag
bit_code = convert_str_to_bit(code)
# convert
lat_out, lon_out, r_out = aacgm.convert(in_lat, in_lon, height, bit_code,
igrf_file)
return lat_out, lon_out, r_out
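# Illustrative call (hypothetical values; assumes the AACGM coefficient files
# referenced by davitpy's rcParams are installed):
#   mlat, mlon, r = convert_latlon(45.5, -120.3, 300.0,
#                                  dt.datetime(2013, 11, 3), code="G2A")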
def convert_latlon_arr(in_lat, in_lon, height, dtime, code="G2A",
igrf_file=None, coeff_prefix=None):
'''Converts between geomagnetic coordinates and AACGM coordinates
Parameters
------------
in_lat : (np.ndarray)
Input latitude in degrees N (code specifies type of latitude)
in_lon : (np.ndarray)
Input longitude in degrees E (code specifies type of longitude)
height : (np.ndarray)
Altitude above the surface of the earth in km
dtime : (datetime)
Single datetime object for magnetic field
code : Optional[str]
String denoting which type(s) of conversion to perform
G2A - geographic (geodetic) to AACGM-v2
A2G - AACGM-v2 to geographic (geodetic)
TRACE - use field-line tracing, not coefficients
ALLOWTRACE - use trace only above 2000 km
BADIDEA - use coefficients above 2000 km
GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
(default = "G2A")
igrf_file : Optional[str]
Full filename of IGRF coefficient file or None to use
rcParams["IGRF_DAVITPY_COEFF_FILE"]. (default=None)
coeff_prefix : Optional[str]
Location and file prefix for aacgm coefficient files or None to use
rcParams["AACGM_DAVITPY_DAT_PREFEX"]. (default=None)
Returns
-------
out_lat : (np.ndarray)
Output latitudes in degrees
out_lon : (np.ndarray)
Output longitudes in degrees
out_r : (np.ndarray)
Geocentric radial distances in R
'''
from davitpy import rcParams
from davitpy.models import aacgm
import numpy as np
# If someone was lazy and entered a list instead of a numpy array,
# recast it here
if isinstance(in_lat, list):
in_lat = np.array(in_lat)
if isinstance(in_lon, list):
in_lon = np.array(in_lon)
if isinstance(height, list):
height = np.array(height)
# Ensure that lat, lon, and height are the same length or if the lengths
# differ that the different ones contain only a single value
ulen = np.unique([height.shape, in_lat.shape, in_lon.shape])
if ulen.shape[0] > 2 or (ulen.shape[0] == 2 and ulen[0] > 1):
logging.error("mismatched input arrays")
return None, None, None
# Define coefficient file prefix if not supplied
if coeff_prefix is None:
coeff_prefix = rcParams['AACGM_DAVITPY_DAT_PREFIX']
# Define IGRF file if not supplied
if igrf_file is None:
igrf_file = rcParams['IGRF_DAVITPY_COEFF_FILE']
# Test time
    if isinstance(dtime, dt.date) and not isinstance(dtime, dt.datetime):
        dtime = dt.datetime.combine(dtime, dt.time(0))
assert isinstance(dtime, dt.datetime), \
logging.error('time must be specified as datetime object')
# Test height
if np.min(height) < 0:
logging.warn('conversion not intended for altitudes < 0 km')
# Test code
code = code.upper()
    if(np.max(height) > 2000 and code.find("TRACE") < 0 and
       code.find("ALLOWTRACE") < 0 and code.find("BADIDEA") < 0):
estr = 'coefficients are not valid for altitudes above 2000 km. You '
estr += 'must either use field-line tracing (trace=True '
estr += 'or allowtrace=True) or indicate you know this '
estr += 'is a bad idea'
logging.error(estr)
# Test latitude range
if np.abs(in_lat).max() > 90.0:
assert np.abs(in_lat).max() <= 90.1, \
logging.error('unrealistic latitude')
in_lat = np.clip(in_lat, -90.0, 90.0)
# Constrain longitudes between -180 and 180
in_lon = ((in_lon + 180.0) % 360.0) - 180.0
# Set current date and time
aacgm.set_datetime(dtime.year, dtime.month, dtime.day, dtime.hour,
dtime.minute, dtime.second, coeff_prefix)
# make flag
bit_code = convert_str_to_bit(code)
# Vectorise the AACGM code
convert_vectorised = np.vectorize(aacgm.convert)
# convert
lat_out, lon_out, r_out = convert_vectorised(in_lat, in_lon, height,
bit_code, igrf_file)
return lat_out, lon_out, r_out
def get_aacgm_coord(glat, glon, height, dtime, method="TRACE",
igrf_file=None, coeff_prefix=None):
'''Get AACGM latitude, longitude, and magnetic local time
Parameters
------------
glat : (float)
Geodetic latitude in degrees N
glon : (float)
Geodetic longitude in degrees E
height : (float)
Altitude above the surface of the earth in km
dtime : (datetime)
Date and time to calculate magnetic location
method : Optional[str]
String denoting which type(s) of conversion to perform
TRACE - use field-line tracing, not coefficients
ALLOWTRACE - use trace only above 2000 km
BADIDEA - use coefficients above 2000 km
GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
(default = "TRACE")
igrf_file : Optional[str]
Full filename of IGRF coefficient file or None to use
rcParams["IGRF_DAVITPY_COEFF_FILE"]. (default=None)
coeff_prefix : Optional[str]
Location and file prefix for aacgm coefficient files or None to use
rcParams["AACGM_DAVITPY_DAT_PREFEX"]. (default=None)
Returns
-------
mlat : (float)
magnetic latitude in degrees
mlon : (float)
magnetic longitude in degrees
mlt : (float)
magnetic local time in hours
'''
from davitpy import rcParams
from davitpy.models import aacgm
# Define coefficient file prefix if not supplied
if coeff_prefix is None:
coeff_prefix = rcParams['AACGM_DAVITPY_DAT_PREFIX']
# Define IGRF file if not supplied
if igrf_file is None:
igrf_file = rcParams['IGRF_DAVITPY_COEFF_FILE']
# Initialize return values
mlat = None
mlon = None
mlt = None
try:
# Get magnetic lat and lon.
mlat, mlon, mr = convert_latlon(glat, glon, height, dtime,
code="G2A|{:s}".format(method),
igrf_file=igrf_file,
coeff_prefix=coeff_prefix)
# Get magnetic local time
mlt = aacgm.mlt_convert(dtime.year, dtime.month, dtime.day, dtime.hour,
dtime.minute, dtime.second, mlon, coeff_prefix,
igrf_file)
except:
logging.error("Unable to get magnetic lat/lon")
return mlat, mlon, mlt
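# Illustrative call (hypothetical values): get_aacgm_coord(45.5, -120.3, 300.0,
# dt.datetime(2013, 11, 3)) returns (mlat, mlon, mlt), or (None, None, None)
# when the conversion fails.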
def get_aacgm_coord_arr(glat, glon, height, dtime, method="TRACE",
igrf_file=None, coeff_prefix=None):
'''Get AACGM latitude, longitude, and magnetic local time
Parameters
------------
glat : (np.array or list)
Geodetic latitude in degrees N
glon : (np.array or list)
Geodetic longitude in degrees E
height : (np.array or list)
Altitude above the surface of the earth in km
dtime : (datetime)
Date and time to calculate magnetic location
    method : Optional[str]
String denoting which type(s) of conversion to perform
TRACE - use field-line tracing, not coefficients
ALLOWTRACE - use trace only above 2000 km
BADIDEA - use coefficients above 2000 km
GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
(default = "TRACE")
igrf_file : Optional[str]
Full filename of IGRF coefficient file or None to use
rcParams["IGRF_DAVITPY_COEFF_FILE"]. (default=None)
coeff_prefix : Optional[str]
Location and file prefix for aacgm coefficient files or None to use
rcParams["AACGM_DAVITPY_DAT_PREFEX"]. (default=None)
Returns
-------
mlat : (float)
magnetic latitude in degrees
mlon : (float)
magnetic longitude in degrees
mlt : (float)
magnetic local time in hours
'''
from davitpy import rcParams
from davitpy.models import aacgm
import numpy as np
# Define coefficient file prefix if not supplied
if coeff_prefix is None:
coeff_prefix = rcParams['AACGM_DAVITPY_DAT_PREFIX']
# Define IGRF file if not supplied
if igrf_file is None:
igrf_file = rcParams['IGRF_DAVITPY_COEFF_FILE']
# Initialize return values
mlat = None
mlon = None
mlt = None
try:
# Get magnetic lat and lon.
mlat, mlon, mr = convert_latlon_arr(glat, glon, height, dtime,
code="G2A|{:s}".format(method),
igrf_file=igrf_file,
coeff_prefix=coeff_prefix)
if mlon is not None:
# Get magnetic local time
mlt_vectorised = np.vectorize(aacgm.mlt_convert)
mlt = mlt_vectorised(dtime.year, dtime.month, dtime.day,
dtime.hour, dtime.minute, dtime.second, mlon,
coeff_prefix, igrf_file)
except:
logging.error("Unable to get magnetic lat/lon")
return mlat, mlon, mlt
def convert_str_to_bit(code):
'''convert string code specification to bit code specification
Parameters
code : (str)
Bitwise code for passing options into converter (default=0)
G2A - geographic (geodetic) to AACGM-v2
A2G - AACGM-v2 to geographic (geodetic)
TRACE - use field-line tracing, not coefficients
ALLOWTRACE - use trace only above 2000 km
BADIDEA - use coefficients above 2000 km
GEOCENTRIC - assume inputs are geocentric w/ RE=6371.2
'''
from davitpy.models import aacgm
convert_code = {"G2A": aacgm.G2A, "A2G": aacgm.A2G, "TRACE": aacgm.TRACE,
"GEOCENTRIC": aacgm.GEOCENTRIC,
"ALLOWTRACE": aacgm.ALLOWTRACE, "BADIDEA": aacgm.BADIDEA}
code = code.upper()
bit_code = sum([convert_code[k] for k in convert_code.keys()
if code.find(k) >= 0])
return bit_code
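# For example, convert_str_to_bit("G2A|TRACE") evaluates to aacgm.G2A + aacgm.TRACE:
# each recognized keyword found in the string contributes its bit value to the sum.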
| vtsuperdarn/davitpy | davitpy/models/aacgm/wrapper.py | Python | gpl-3.0 | 13,759 |
## This is my implementation of example6.py
## Example 7: Generation of biorthogonal scaling functions and wavelets.
## using the Python libraries numpy, scipy, matplotlib, PyWavelets
## this needs biphivals.py (just import it in from the same directory!)
##
## The main reference that I'll use is
## Gilbert Strang, and Kevin Amaratunga. 18.327 Wavelets, Filter Banks and Applications, Spring 2003. (Massachusetts Institute of Technology: MIT OpenCourseWare), http://ocw.mit.edu (Accessed 19 Jun, 2015). License: Creative Commons BY-NC-SA
##
##
##
#####################################################################################
## Copyleft 2015, Ernest Yeung <[email protected]>
##
## 20150702
##
## This program, along with all its code, is free software;
## you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software Foundation, Inc.,
## 51 Franklin Street, Fifth Floor, Boston, MA
## 02110-1301, USA
##
## Governing the ethics of using this program, I default to the Caltech Honor Code:
## ``No member of the Caltech community shall take unfair advantage of
## any other member of the Caltech community.''
##
## If you like what I'm doing and would like to help and contribute support,
## please take a look at my crowdfunding campaign at ernestyalumni.tilt.com
## and subscription-based Patreon
## read my mission statement and give your financial support,
## no matter how small or large,
## if you can
## and to keep checking my ernestyalumni.wordpress.com blog and
## various social media channels
## for updates as I try to keep putting out great stuff.
##
## Fund Science! Help my physics education outreach and research efforts at
## Open/Tilt or subscription Patreon - Ernest Yeung
##
## ernestyalumni.tilt.com
##
## Facebook : ernestyalumni
## gmail : ernestyalumni
## google : ernestyalumni
## linkedin : ernestyalumni
## Patreon : ernestyalumni
## Tilt/Open : ernestyalumni
## tumblr : ernestyalumni
## twitter : ernestyalumni
## youtube : ernestyalumni
## wordpress : ernestyalumni
##
##
################################################################################
##
import numpy as np
import matplotlib.pyplot as plt
import pywt
from biphivals import biphivals
# Example 3a: Compute the samples of the biorthogonal scaling functions
# and wavelets
# 9/7 filters
# create the biorthogonal spline wavelet with 4 vanishing moments object
w_bior = pywt.Wavelet('bior4.4')
[h0,h1,f0,f1] = w_bior.dec_lo, w_bior.dec_hi, w_bior.rec_lo, w_bior.rec_hi
[x,phi,phitilde,psi,psitilde] = biphivals(h0,h1,f0,f1,5)
plt.figure(1)
plt.plot(x,phi,'-',label="Primary scaling function")
plt.plot(x,psi,'-.',label="Primary wavelet")
plt.legend()
plt.title("Primary Daubachies 9/7 Pair")
plt.figure(2)
plt.plot(x,phitilde,'--',label="Dual scaling function")
plt.plot(x,psitilde,':',label="Dual wavelet")
plt.legend()
plt.title('Dual Daubechies 9/7 pair')
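# Display both figures (assumes an interactive matplotlib backend when run as a script).
plt.show()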
| ernestyalumni/18-327-wavelets-filter-banks | tools/example7.py | Python | mit | 5,011 |
# coding=utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from flask import Flask, request
from flask.ext.restful import Resource, fields, marshal_with, reqparse, abort
from flask.globals import g
from jormungandr import i_manager, timezone
from jormungandr.interfaces.v1.fields import disruption_marshaller
from jormungandr.interfaces.v1.make_links import add_id_links
from jormungandr.interfaces.v1.fields import NonNullList, NonNullNested, PbField, error, pt_object, feed_publisher
from jormungandr.interfaces.v1.ResourceUri import ResourceUri
from jormungandr.interfaces.argument import ArgumentDoc
from jormungandr.interfaces.parsers import depth_argument, option_value, default_count_arg_type, date_time_format
from copy import deepcopy
import datetime
pt_objects = {
"pt_objects": NonNullList(NonNullNested(pt_object), attribute='places'),
"disruptions": fields.List(NonNullNested(disruption_marshaller), attribute="impacts"),
"error": PbField(error, attribute='error'),
"feed_publishers": fields.List(NonNullNested(feed_publisher))
}
pt_object_type_values = ["network", "commercial_mode", "line", "line_group", "route", "stop_area"]
class Ptobjects(ResourceUri):
def __init__(self, *args, **kwargs):
ResourceUri.__init__(self, *args, **kwargs)
self.parsers = {}
self.parsers["get"] = reqparse.RequestParser(
argument_class=ArgumentDoc)
self.parsers["get"].add_argument("q", type=unicode, required=True,
description="The data to search")
self.parsers["get"].add_argument("type[]", type=option_value(pt_object_type_values),
action="append",default=pt_object_type_values,
description="The type of data to\
search")
self.parsers["get"].add_argument("count", type=default_count_arg_type, default=10,
description="The maximum number of\
ptobjects returned")
self.parsers["get"].add_argument("search_type", type=int, default=0,
description="Type of search:\
firstletter or type error")
self.parsers["get"].add_argument("admin_uri[]", type=unicode,
action="append",
description="If filled, will\
restrained the search within the\
given admin uris")
self.parsers["get"].add_argument("depth", type=depth_argument,
default=1,
description="The depth of objects")
self.parsers["get"].add_argument("_current_datetime", type=date_time_format, default=datetime.datetime.utcnow(),
description="The datetime used to consider the state of the pt object"
" Default is the current date and it is used for debug."
" Note: it will mainly change the disruptions that concern "
"the object The timezone should be specified in the format,"
" else we consider it as UTC")
@marshal_with(pt_objects)
def get(self, region=None, lon=None, lat=None):
self.region = i_manager.get_region(region, lon, lat)
timezone.set_request_timezone(self.region)
args = self.parsers["get"].parse_args()
self._register_interpreted_parameters(args)
if len(args['q']) == 0:
abort(400, message="Search word absent")
response = i_manager.dispatch(args, "pt_objects",
instance_name=self.region)
return response, 200
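# Illustrative request handled by this resource (the coverage name and query
# values are hypothetical):
#
#   GET /v1/coverage/fr-idf/pt_objects?q=metro&type[]=line&count=5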
| ballouche/navitia | source/jormungandr/jormungandr/interfaces/v1/Ptobjects.py | Python | agpl-3.0 | 5,265 |
import os
import warnings
#from distutils.core import setup
from setuptools import setup
with open(os.path.join('bctools', '__init__.py')) as init_:
for line in init_:
if '__version__' in line:
version = line.split('=')[-1].strip().replace('"','')
break
else:
version = 'unknown'
warnings.warn('Unable to find version, using "%s"' % version)
input("Continue?")
setup(name='bctools',
version=version,
description='BitCurator report and GUI tools',
author='Kam Woods',
author_email='[email protected]',
maintainer = "Kam Woods",
maintainer_email = "[email protected]",
url="https://github.com/kamwoods/bitcurator",
#packages=['bctools', ],
#package_data={'bctools': ['font/*.ttf', 'font/*.txt']},
py_modules = ['dfxml', 'fiwalk', 'bc_config', 'bc_genrep_dfxml', 'bc_genrep_feature_xls', 'bc_genrep_premis', 'bc_genrep_text', 'bc_genrep_xls', 'bc_graph', 'bc_pdf', 'bc_regress', 'bc_utils'],
scripts = ['bc_disk_access_v2.py', 'bc_reports_tab.py', 'generate_report.py'],
      classifiers = ['Development Status :: 2 - Pre-Alpha',
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Programming Language :: Python",
'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.0',
"Programming Language :: Python :: 3.1",
"Programming Language :: Python :: 3.2",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules"],
keywords="bitcurator")
| dmnyu/bitcurator | bctools/setup.py | Python | gpl-3.0 | 1,787 |
# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
import pdb
import bpython
class BPdb(pdb.Pdb):
""" PDB with BPython support. """
def __init__(self):
pdb.Pdb.__init__(self)
self.rcLines = []
self.prompt = '(BPdb) '
self.intro = 'Use "B" to enter bpython, Ctrl-d to exit it.'
def postloop(self):
# We only want to show the intro message once.
self.intro = None
pdb.Pdb.postloop(self)
### cmd.Cmd commands
def do_Bpython(self, arg):
bpython.embed(self.curframe.f_locals, ['-i'])
def help_Bpython(self):
print("B(python)")
print()
print ("Invoke the bpython interpreter for this stack frame. To exit "
"bpython and return to a standard pdb press Ctrl-d")
### shortcuts
do_B = do_Bpython
help_B = help_Bpython
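# Hypothetical usage sketch (module path assumed from this file's location):
#
#     from bpdb.debugger import BPdb
#     BPdb().set_trace()   # drop into the (BPdb) prompt; type "B" there for a bpython shell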
| fenhl/bpython-blessings | bpdb/debugger.py | Python | mit | 1,918 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Optimizer ops for use in layers and tf.learn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib import framework as contrib_framework
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as vars_
from tensorflow.python.training import optimizer as optimizer_
from tensorflow.python.training import training as train
OPTIMIZER_CLS_NAMES = {
"Adagrad": train.AdagradOptimizer,
"Adam": train.AdamOptimizer,
"Ftrl": train.FtrlOptimizer,
"Momentum": train.MomentumOptimizer,
"RMSProp": train.RMSPropOptimizer,
"SGD": train.GradientDescentOptimizer,
}
OPTIMIZER_SUMMARIES = [
"learning_rate",
"loss",
"gradients",
"gradient_norm",
]
def optimize_loss(loss,
global_step,
learning_rate,
optimizer,
gradient_noise_scale=None,
gradient_multipliers=None,
clip_gradients=None,
learning_rate_decay_fn=None,
update_ops=None,
variables=None,
name=None,
summaries=None,
colocate_gradients_with_ops=False):
"""Given loss and parameters for optimizer, returns a training op.
Various ways of passing optimizers, include:
- string, name of the optimizer like 'SGD', 'Adam', see OPTIMIZER_CLS_NAMES
for full list. E.g. `optimize_loss(..., optimizer='Adam')`.
- function, takes learning rate `Tensor` as argument and must return
`Optimizer` instance. E.g. `optimize_loss(...,
optimizer=lambda lr: tf.train.MomentumOptimizer(lr, momentum=0.5))`.
Alternatively, if `learning_rate` is `None`, the function takes no
arguments. E.g. `optimize_loss(..., learning_rate=None,
optimizer=lambda: tf.train.MomentumOptimizer(0.5, momentum=0.5))`.
- class, subclass of `Optimizer` that takes only one required argument -
learning rate, such as AdamOptimizer, AdagradOptimizer.
E.g. `optimize_loss(..., optimizer=tf.train.AdagradOptimizer)`.
- object, instance of subclass of `Optimizer`.
E.g., `optimizer_loss(..., optimizer=tf.train.AdagradOptimizer(0.5))`.
Args:
loss: Scalar `Tensor`.
global_step: Scalar int `Tensor`, step counter for each update. If not
supplied, it will be fetched from the default graph (see
`tf.contrib.framework.get_global_step` for details). If it's
not been created, no step will be incremented with each weight
update. `learning_rate_decay_fn` requires `global_step`.
learning_rate: float or `Tensor`, magnitude of update per each training
step. Can be `None`.
optimizer: string, class or optimizer instance, used as trainer.
string should be name of optimizer, like 'SGD',
'Adam', 'Adagrad'. Full list in OPTIMIZER_CLS_NAMES constant.
class should be sub-class of `tf.Optimizer` that implements
`compute_gradients` and `apply_gradients` functions.
optimizer instance should be instantiation of `tf.Optimizer`
sub-class and have `compute_gradients` and `apply_gradients`
functions.
gradient_noise_scale: float or None, adds 0-mean normal noise scaled by this
value.
gradient_multipliers: dict of variables or variable names to floats.
If present, gradients for specified
variables will be multiplied by given constant.
clip_gradients: float or `None`, clips gradients by this value.
learning_rate_decay_fn: function, takes `learning_rate` and `global_step`
`Tensor`s, returns `Tensor`.
Can be used to implement any learning rate decay
functions.
For example: `tf.train.exponential_decay`.
Ignored if `learning_rate` is not supplied.
update_ops: list of update `Operation`s to execute at each step. If `None`,
uses elements of UPDATE_OPS collection. The order of execution
between `update_ops` and `loss` is non-deterministic.
variables: list of variables to optimize or
`None` to use all trainable variables.
name: The name for this operation is used to scope operations and summaries.
summaries: List of internal quantities to visualize on tensorboard. If not
set only the loss and the learning rate will be reported. The
complete list is in OPTIMIZER_SUMMARIES.
colocate_gradients_with_ops: If True, try colocating gradients with the
corresponding op.
Returns:
Training op.
Raises:
ValueError: if:
* `loss` is an invalid type or shape.
* `global_step` is an invalid type or shape.
* `learning_rate` is an invalid type or value.
* `optimizer` is wrong type.
* `learning_rate` and `learning_rate_decay_fn` are supplied, but no
`global_step` is available.
"""
loss = ops.convert_to_tensor(loss)
contrib_framework.assert_scalar(loss)
if global_step is None:
global_step = contrib_framework.get_global_step()
else:
contrib_framework.assert_global_step(global_step)
with vs.variable_scope(name, "OptimizeLoss", [loss, global_step]):
# Update ops take UPDATE_OPS collection if not provided.
if update_ops is None:
update_ops = set(ops.get_collection(ops.GraphKeys.UPDATE_OPS))
# Make sure update ops are ran before computing loss.
if update_ops:
loss = control_flow_ops.with_dependencies(list(update_ops), loss)
# Learning rate variable, with possible decay.
lr = None
if learning_rate is not None:
if (isinstance(learning_rate, ops.Tensor)
and learning_rate.get_shape().ndims == 0):
lr = learning_rate
elif isinstance(learning_rate, float):
if learning_rate < 0.0:
raise ValueError("Invalid learning_rate %s.", learning_rate)
lr = vs.get_variable(
"learning_rate", [], trainable=False,
initializer=init_ops.constant_initializer(learning_rate))
else:
raise ValueError("Learning rate should be 0d Tensor or float. "
"Got %s of type %s" % (
str(learning_rate), str(type(learning_rate))))
if summaries is None:
summaries = ["loss", "learning_rate"]
if learning_rate is not None and learning_rate_decay_fn is not None:
if global_step is None:
raise ValueError("global_step is required for learning_rate_decay_fn.")
lr = learning_rate_decay_fn(lr, global_step)
if "learning_rate" in summaries:
logging_ops.scalar_summary("learning_rate", lr)
# Create optimizer, given specified parameters.
if isinstance(optimizer, six.string_types):
if lr is None:
raise ValueError("Learning rate is None, but should be specified if "
"optimizer is string (%s)." % optimizer)
if optimizer not in OPTIMIZER_CLS_NAMES:
raise ValueError(
"Optimizer name should be one of [%s], you provided %s."
% (", ".join(OPTIMIZER_CLS_NAMES), optimizer))
opt = OPTIMIZER_CLS_NAMES[optimizer](learning_rate=lr)
elif (isinstance(optimizer, type)
and issubclass(optimizer, optimizer_.Optimizer)):
if lr is None:
raise ValueError("Learning rate is None, but should be specified if "
"optimizer is class (%s)." % optimizer)
opt = optimizer(learning_rate=lr)
elif isinstance(optimizer, optimizer_.Optimizer):
opt = optimizer
elif callable(optimizer):
if learning_rate is not None:
opt = optimizer(lr)
else:
opt = optimizer()
if not isinstance(opt, optimizer_.Optimizer):
raise ValueError("Unrecognized optimizer: function should return "
"subclass of Optimizer. Got %s." % str(opt))
else:
raise ValueError("Unrecognized optimizer: should be string, "
"subclass of Optimizer, instance of "
"subclass of Optimizer or function with one argument. "
"Got %s." % str(optimizer))
# All trainable variables, if specific variables are not specified.
if variables is None:
variables = vars_.trainable_variables()
# Compute gradients.
gradients = opt.compute_gradients(
loss, variables,
colocate_gradients_with_ops=colocate_gradients_with_ops)
# Optionally add gradient noise.
if gradient_noise_scale is not None:
gradients = _add_scaled_noise_to_gradients(
gradients, gradient_noise_scale)
# Multiply some gradients.
if gradient_multipliers is not None:
gradients = _multiply_gradients(gradients, gradient_multipliers)
# Optionally clip gradients by global norm.
if clip_gradients is not None:
gradients = _clip_gradients_by_norm(gradients, clip_gradients)
# Add scalar summary for loss.
if "loss" in summaries:
logging_ops.scalar_summary("loss", loss)
# Add histograms for variables, gradients and gradient norms.
for gradient, variable in gradients:
if isinstance(gradient, ops.IndexedSlices):
grad_values = gradient.values
else:
grad_values = gradient
if grad_values is not None:
if "gradients" in summaries:
logging_ops.histogram_summary(variable.name + "/gradients",
grad_values)
if "gradient_norm" in summaries:
logging_ops.histogram_summary(variable.name + "/gradient_norm",
clip_ops.global_norm([grad_values]))
# Create gradient updates.
grad_updates = opt.apply_gradients(gradients,
global_step=global_step,
name="train")
# Ensure the train_tensor computes grad_updates.
train_tensor = control_flow_ops.with_dependencies([grad_updates], loss)
return train_tensor
def _clip_gradients_by_norm(grads_and_vars, clip_gradients):
"""Clips gradients by global norm."""
gradients, variables = zip(*grads_and_vars)
clipped_gradients, _ = clip_ops.clip_by_global_norm(gradients,
clip_gradients)
return list(zip(clipped_gradients, variables))
def _add_scaled_noise_to_gradients(grads_and_vars, gradient_noise_scale):
"""Adds scaled noise from a 0-mean normal distribution to gradients."""
gradients, variables = zip(*grads_and_vars)
noisy_gradients = []
for gradient in gradients:
if gradient is None:
noisy_gradients.append(None)
continue
if isinstance(gradient, ops.IndexedSlices):
gradient_shape = gradient.dense_shape
else:
gradient_shape = gradient.get_shape()
noise = random_ops.truncated_normal(gradient_shape) * gradient_noise_scale
noisy_gradients.append(gradient + noise)
return list(zip(noisy_gradients, variables))
def _multiply_gradients(grads_and_vars, gradient_multipliers):
"""Multiply specified gradients."""
multiplied_grads_and_vars = []
for grad, var in grads_and_vars:
if (grad is not None and
(var in gradient_multipliers or var.name in gradient_multipliers)):
key = var if var in gradient_multipliers else var.name
multiplier = constant_op.constant(
gradient_multipliers[key], dtype=dtypes.float32)
if isinstance(grad, ops.IndexedSlices):
grad_values = grad.values * multiplier
grad = ops.IndexedSlices(grad_values, grad.indices, grad.dense_shape)
else:
grad *= multiplier
multiplied_grads_and_vars.append((grad, var))
return multiplied_grads_and_vars
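# Minimal usage sketch (illustrative only; assumes a graph that already defines a
# scalar `loss` tensor and a global step variable):
#
#     train_op = optimize_loss(
#         loss=loss,
#         global_step=contrib_framework.get_global_step(),
#         learning_rate=0.1,
#         optimizer="SGD",
#         clip_gradients=5.0)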
| cg31/tensorflow | tensorflow/contrib/layers/python/layers/optimizers.py | Python | apache-2.0 | 13,132 |
"""Support for functionality to interact with Android TV / Fire TV devices."""
from __future__ import annotations
from datetime import datetime
import functools
import logging
from adb_shell.exceptions import (
AdbTimeoutError,
InvalidChecksumError,
InvalidCommandError,
InvalidResponseError,
TcpTimeoutException,
)
from androidtv import ha_state_detection_rules_validator
from androidtv.constants import APPS, KEYS
from androidtv.exceptions import LockNotAcquiredException
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
ATTR_COMMAND,
ATTR_CONNECTIONS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_SW_VERSION,
CONF_DEVICE_CLASS,
CONF_HOST,
CONF_NAME,
CONF_PORT,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType
from .const import (
ANDROID_DEV,
ANDROID_DEV_OPT,
CONF_ADB_SERVER_IP,
CONF_ADB_SERVER_PORT,
CONF_ADBKEY,
CONF_APPS,
CONF_EXCLUDE_UNNAMED_APPS,
CONF_GET_SOURCES,
CONF_MIGRATION_OPTIONS,
CONF_SCREENCAP,
CONF_STATE_DETECTION_RULES,
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
DEFAULT_ADB_SERVER_PORT,
DEFAULT_DEVICE_CLASS,
DEFAULT_EXCLUDE_UNNAMED_APPS,
DEFAULT_GET_SOURCES,
DEFAULT_PORT,
DEFAULT_SCREENCAP,
DEVICE_ANDROIDTV,
DEVICE_CLASSES,
DOMAIN,
PROP_ETHMAC,
PROP_WIFIMAC,
SIGNAL_CONFIG_ENTITY,
)
_LOGGER = logging.getLogger(__name__)
SUPPORT_ANDROIDTV = (
SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_SELECT_SOURCE
| SUPPORT_STOP
| SUPPORT_VOLUME_MUTE
| SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_STEP
)
SUPPORT_FIRETV = (
SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_TURN_ON
| SUPPORT_TURN_OFF
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_SELECT_SOURCE
| SUPPORT_STOP
)
ATTR_ADB_RESPONSE = "adb_response"
ATTR_DEVICE_PATH = "device_path"
ATTR_HDMI_INPUT = "hdmi_input"
ATTR_LOCAL_PATH = "local_path"
SERVICE_ADB_COMMAND = "adb_command"
SERVICE_DOWNLOAD = "download"
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
SERVICE_UPLOAD = "upload"
DEFAULT_NAME = "Android TV"
# Deprecated in Home Assistant 2022.2
PLATFORM_SCHEMA = cv.deprecated(
vol.All(
        PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
DEVICE_CLASSES
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ADBKEY): cv.isfile,
vol.Optional(CONF_ADB_SERVER_IP): cv.string,
vol.Optional(
CONF_ADB_SERVER_PORT, default=DEFAULT_ADB_SERVER_PORT
): cv.port,
vol.Optional(CONF_GET_SOURCES, default=DEFAULT_GET_SOURCES): cv.boolean,
vol.Optional(CONF_APPS, default={}): vol.Schema(
{cv.string: vol.Any(cv.string, None)}
),
vol.Optional(CONF_TURN_ON_COMMAND): cv.string,
vol.Optional(CONF_TURN_OFF_COMMAND): cv.string,
vol.Optional(CONF_STATE_DETECTION_RULES, default={}): vol.Schema(
{cv.string: ha_state_detection_rules_validator(vol.Invalid)}
),
vol.Optional(
CONF_EXCLUDE_UNNAMED_APPS, default=DEFAULT_EXCLUDE_UNNAMED_APPS
): cv.boolean,
vol.Optional(CONF_SCREENCAP, default=DEFAULT_SCREENCAP): cv.boolean,
}
),
)
)
# Translate from `AndroidTV` / `FireTV` reported state to HA state.
ANDROIDTV_STATES = {
"off": STATE_OFF,
"idle": STATE_IDLE,
"standby": STATE_STANDBY,
"playing": STATE_PLAYING,
"paused": STATE_PAUSED,
}
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info=None,
) -> None:
"""Set up the Android TV / Fire TV platform."""
host = config[CONF_HOST]
# get main data
config_data = {
CONF_HOST: host,
CONF_DEVICE_CLASS: config.get(CONF_DEVICE_CLASS, DEFAULT_DEVICE_CLASS),
CONF_PORT: config.get(CONF_PORT, DEFAULT_PORT),
}
for key in (CONF_ADBKEY, CONF_ADB_SERVER_IP, CONF_ADB_SERVER_PORT, CONF_NAME):
if key in config:
config_data[key] = config[key]
# get options
config_options = {
key: config[key]
for key in (
CONF_APPS,
CONF_EXCLUDE_UNNAMED_APPS,
CONF_GET_SOURCES,
CONF_SCREENCAP,
CONF_STATE_DETECTION_RULES,
CONF_TURN_OFF_COMMAND,
CONF_TURN_ON_COMMAND,
)
if key in config
}
# save option to use with entry
if config_options:
config_data[CONF_MIGRATION_OPTIONS] = config_options
# Launch config entries setup
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config_data
)
)
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up the Android TV entity."""
aftv = hass.data[DOMAIN][entry.entry_id][ANDROID_DEV]
device_class = aftv.DEVICE_CLASS
device_type = "Android TV" if device_class == DEVICE_ANDROIDTV else "Fire TV"
if CONF_NAME in entry.data:
device_name = entry.data[CONF_NAME]
else:
device_name = f"{device_type} {entry.data[CONF_HOST]}"
device_args = [
aftv,
device_name,
device_type,
entry.unique_id,
entry.entry_id,
hass.data[DOMAIN][entry.entry_id],
]
async_add_entities(
[
AndroidTVDevice(*device_args)
if device_class == DEVICE_ANDROIDTV
else FireTVDevice(*device_args)
]
)
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
SERVICE_ADB_COMMAND,
{vol.Required(ATTR_COMMAND): cv.string},
"adb_command",
)
platform.async_register_entity_service(
SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent"
)
platform.async_register_entity_service(
SERVICE_DOWNLOAD,
{
vol.Required(ATTR_DEVICE_PATH): cv.string,
vol.Required(ATTR_LOCAL_PATH): cv.string,
},
"service_download",
)
platform.async_register_entity_service(
SERVICE_UPLOAD,
{
vol.Required(ATTR_DEVICE_PATH): cv.string,
vol.Required(ATTR_LOCAL_PATH): cv.string,
},
"service_upload",
)
def adb_decorator(override_available=False):
"""Wrap ADB methods and catch exceptions.
Allows for overriding the available status of the ADB connection via the
`override_available` parameter.
"""
def _adb_decorator(func):
"""Wrap the provided ADB method and catch exceptions."""
@functools.wraps(func)
async def _adb_exception_catcher(self, *args, **kwargs):
"""Call an ADB-related method and catch exceptions."""
# pylint: disable=protected-access
if not self.available and not override_available:
return None
try:
return await func(self, *args, **kwargs)
except LockNotAcquiredException:
# If the ADB lock could not be acquired, skip this command
_LOGGER.info(
"ADB command not executed because the connection is currently in use"
)
return
except self.exceptions as err:
_LOGGER.error(
"Failed to execute an ADB command. ADB connection re-"
"establishing attempt in the next update. Error: %s",
err,
)
await self.aftv.adb_close()
self._attr_available = False
return None
except Exception:
# An unforeseen exception occurred. Close the ADB connection so that
# it doesn't happen over and over again, then raise the exception.
await self.aftv.adb_close()
self._attr_available = False
raise
return _adb_exception_catcher
return _adb_decorator
class ADBDevice(MediaPlayerEntity):
"""Representation of an Android TV or Fire TV device."""
def __init__(
self,
aftv,
name,
dev_type,
unique_id,
entry_id,
entry_data,
):
"""Initialize the Android TV / Fire TV device."""
self.aftv = aftv
self._attr_name = name
self._attr_unique_id = unique_id
self._entry_id = entry_id
self._entry_data = entry_data
info = aftv.device_properties
model = info.get(ATTR_MODEL)
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
model=f"{model} ({dev_type})" if model else dev_type,
name=name,
)
if manufacturer := info.get(ATTR_MANUFACTURER):
self._attr_device_info[ATTR_MANUFACTURER] = manufacturer
if sw_version := info.get(ATTR_SW_VERSION):
self._attr_device_info[ATTR_SW_VERSION] = sw_version
if mac := format_mac(info.get(PROP_ETHMAC) or info.get(PROP_WIFIMAC, "")):
self._attr_device_info[ATTR_CONNECTIONS] = {(CONNECTION_NETWORK_MAC, mac)}
self._app_id_to_name = {}
self._app_name_to_id = {}
self._get_sources = DEFAULT_GET_SOURCES
self._exclude_unnamed_apps = DEFAULT_EXCLUDE_UNNAMED_APPS
self._screencap = DEFAULT_SCREENCAP
self.turn_on_command = None
self.turn_off_command = None
# ADB exceptions to catch
if not aftv.adb_server_ip:
# Using "adb_shell" (Python ADB implementation)
self.exceptions = (
AdbTimeoutError,
BrokenPipeError,
ConnectionResetError,
ValueError,
InvalidChecksumError,
InvalidCommandError,
InvalidResponseError,
TcpTimeoutException,
)
else:
# Using "pure-python-adb" (communicate with ADB server)
self.exceptions = (ConnectionResetError, RuntimeError)
# Property attributes
self._attr_extra_state_attributes = {
ATTR_ADB_RESPONSE: None,
ATTR_HDMI_INPUT: None,
}
def _process_config(self):
"""Load the config options."""
_LOGGER.debug("Loading configuration options")
options = self._entry_data[ANDROID_DEV_OPT]
apps = options.get(CONF_APPS, {})
self._app_id_to_name = APPS.copy()
self._app_id_to_name.update(apps)
self._app_name_to_id = {
value: key for key, value in self._app_id_to_name.items() if value
}
# Make sure that apps overridden via the `apps` parameter are reflected
# in `self._app_name_to_id`
for key, value in apps.items():
self._app_name_to_id[value] = key
self._get_sources = options.get(CONF_GET_SOURCES, DEFAULT_GET_SOURCES)
self._exclude_unnamed_apps = options.get(
CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS
)
self._screencap = options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP)
self.turn_off_command = options.get(CONF_TURN_OFF_COMMAND)
self.turn_on_command = options.get(CONF_TURN_ON_COMMAND)
async def async_added_to_hass(self):
"""Set config parameter when add to hass."""
await super().async_added_to_hass()
self._process_config()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SIGNAL_CONFIG_ENTITY}_{self._entry_id}",
self._process_config,
)
)
return
@property
def media_image_hash(self) -> str | None:
"""Hash value for media image."""
return f"{datetime.now().timestamp()}" if self._screencap else None
@adb_decorator()
async def _adb_screencap(self):
"""Take a screen capture from the device."""
return await self.aftv.adb_screencap()
async def async_get_media_image(self):
"""Fetch current playing image."""
if not self._screencap or self.state in (STATE_OFF, None) or not self.available:
return None, None
media_data = await self._adb_screencap()
if media_data:
return media_data, "image/png"
# If an exception occurred and the device is no longer available, write the state
if not self.available:
self.async_write_ha_state()
return None, None
@adb_decorator()
async def async_media_play(self):
"""Send play command."""
await self.aftv.media_play()
@adb_decorator()
async def async_media_pause(self):
"""Send pause command."""
await self.aftv.media_pause()
@adb_decorator()
async def async_media_play_pause(self):
"""Send play/pause command."""
await self.aftv.media_play_pause()
@adb_decorator()
async def async_turn_on(self):
"""Turn on the device."""
if self.turn_on_command:
await self.aftv.adb_shell(self.turn_on_command)
else:
await self.aftv.turn_on()
@adb_decorator()
async def async_turn_off(self):
"""Turn off the device."""
if self.turn_off_command:
await self.aftv.adb_shell(self.turn_off_command)
else:
await self.aftv.turn_off()
@adb_decorator()
async def async_media_previous_track(self):
"""Send previous track command (results in rewind)."""
await self.aftv.media_previous_track()
@adb_decorator()
async def async_media_next_track(self):
"""Send next track command (results in fast-forward)."""
await self.aftv.media_next_track()
@adb_decorator()
async def async_select_source(self, source):
"""Select input source.
If the source starts with a '!', then it will close the app instead of
opening it.
"""
if isinstance(source, str):
if not source.startswith("!"):
await self.aftv.launch_app(self._app_name_to_id.get(source, source))
else:
source_ = source[1:].lstrip()
await self.aftv.stop_app(self._app_name_to_id.get(source_, source_))
@adb_decorator()
async def adb_command(self, command):
"""Send an ADB command to an Android TV / Fire TV device."""
if key := KEYS.get(command):
await self.aftv.adb_shell(f"input keyevent {key}")
return
if command == "GET_PROPERTIES":
self._attr_extra_state_attributes[ATTR_ADB_RESPONSE] = str(
await self.aftv.get_properties_dict()
)
self.async_write_ha_state()
return
try:
response = await self.aftv.adb_shell(command)
except UnicodeDecodeError:
return
if isinstance(response, str) and response.strip():
self._attr_extra_state_attributes[ATTR_ADB_RESPONSE] = response.strip()
self.async_write_ha_state()
return
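    # Hypothetical service call (YAML) that reaches this method; the entity_id is
    # illustrative and "HOME" is one of the key names accepted via KEYS above:
    #
    #   service: androidtv.adb_command
    #   target:
    #     entity_id: media_player.android_tv_living_room
    #   data:
    #     command: HOME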
@adb_decorator()
async def learn_sendevent(self):
"""Translate a key press on a remote to ADB 'sendevent' commands."""
output = await self.aftv.learn_sendevent()
if output:
self._attr_extra_state_attributes[ATTR_ADB_RESPONSE] = output
self.async_write_ha_state()
msg = f"Output from service '{SERVICE_LEARN_SENDEVENT}' from {self.entity_id}: '{output}'"
self.hass.components.persistent_notification.async_create(
msg,
title="Android TV",
)
_LOGGER.info("%s", msg)
@adb_decorator()
async def service_download(self, device_path, local_path):
"""Download a file from your Android TV / Fire TV device to your Home Assistant instance."""
if not self.hass.config.is_allowed_path(local_path):
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
return
await self.aftv.adb_pull(local_path, device_path)
@adb_decorator()
async def service_upload(self, device_path, local_path):
"""Upload a file from your Home Assistant instance to an Android TV / Fire TV device."""
if not self.hass.config.is_allowed_path(local_path):
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
return
await self.aftv.adb_push(local_path, device_path)
class AndroidTVDevice(ADBDevice):
"""Representation of an Android TV device."""
_attr_supported_features = SUPPORT_ANDROIDTV
@adb_decorator(override_available=True)
async def async_update(self):
"""Update the device state and, if necessary, re-connect."""
# Check if device is disconnected.
if not self.available:
# Try to connect
self._attr_available = await self.aftv.adb_connect(always_log_errors=False)
# If the ADB connection is not intact, don't update.
if not self.available:
return
# Get the updated state and attributes.
(
state,
self._attr_app_id,
running_apps,
_,
self._attr_is_volume_muted,
self._attr_volume_level,
self._attr_extra_state_attributes[ATTR_HDMI_INPUT],
) = await self.aftv.update(self._get_sources)
self._attr_state = ANDROIDTV_STATES.get(state)
if self._attr_state is None:
self._attr_available = False
if running_apps:
self._attr_source = self._attr_app_name = self._app_id_to_name.get(
self._attr_app_id, self._attr_app_id
)
sources = [
self._app_id_to_name.get(
app_id, app_id if not self._exclude_unnamed_apps else None
)
for app_id in running_apps
]
self._attr_source_list = [source for source in sources if source]
else:
self._attr_source_list = None
@adb_decorator()
async def async_media_stop(self):
"""Send stop command."""
await self.aftv.media_stop()
@adb_decorator()
async def async_mute_volume(self, mute):
"""Mute the volume."""
await self.aftv.mute_volume()
@adb_decorator()
async def async_set_volume_level(self, volume):
"""Set the volume level."""
await self.aftv.set_volume_level(volume)
@adb_decorator()
async def async_volume_down(self):
"""Send volume down command."""
self._attr_volume_level = await self.aftv.volume_down(self._attr_volume_level)
@adb_decorator()
async def async_volume_up(self):
"""Send volume up command."""
self._attr_volume_level = await self.aftv.volume_up(self._attr_volume_level)
class FireTVDevice(ADBDevice):
"""Representation of a Fire TV device."""
_attr_supported_features = SUPPORT_FIRETV
@adb_decorator(override_available=True)
async def async_update(self):
"""Update the device state and, if necessary, re-connect."""
# Check if device is disconnected.
if not self.available:
# Try to connect
self._attr_available = await self.aftv.adb_connect(always_log_errors=False)
# If the ADB connection is not intact, don't update.
if not self.available:
return
# Get the `state`, `current_app`, `running_apps` and `hdmi_input`.
(
state,
self._attr_app_id,
running_apps,
self._attr_extra_state_attributes[ATTR_HDMI_INPUT],
) = await self.aftv.update(self._get_sources)
self._attr_state = ANDROIDTV_STATES.get(state)
if self._attr_state is None:
self._attr_available = False
if running_apps:
self._attr_source = self._app_id_to_name.get(
self._attr_app_id, self._attr_app_id
)
sources = [
self._app_id_to_name.get(
app_id, app_id if not self._exclude_unnamed_apps else None
)
for app_id in running_apps
]
self._attr_source_list = [source for source in sources if source]
else:
self._attr_source_list = None
@adb_decorator()
async def async_media_stop(self):
"""Send stop (back) command."""
await self.aftv.back()
| home-assistant/home-assistant | homeassistant/components/androidtv/media_player.py | Python | apache-2.0 | 22,083 |
# Given two strings, return True if either of the strings appears at the very
# end of the other string, ignoring upper/lower case differences (in other
# words, the computation should not be "case sensitive").
# end_other('Hiabc', 'abc') --> True
# end_other('AbC', 'HiaBc') --> True
# end_other('abc', 'abXabc') --> True
def end_other(a, b):
a_low = a.lower()
b_low = b.lower()
return (a_low.endswith(b_low) or b_low.endswith(a_low))
print(end_other('Hiabc', 'abc'))
print(end_other('AbC', 'HiaBc'))
print(end_other('abc', 'abXabc'))
| RCoon/CodingBat | Python/String_2/end_other.py | Python | mit | 549 |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (C) 2005-2009 Håvard Gulldahl
# <[email protected]>
#
# Lisens: GPL2
#
# $Id$
###########################################################################
__version__ = "2.0.6"
__all__ = ['fakturabibliotek','oppgradering','f60','cli', 'sikkerhetskopi', 'okonomi', 'epost', 'historikk', 'ekstra', 'fakturafeil', 'fakturakomponenter', 'rapport', 'gui']
| kkoksvik/finfaktura | finfaktura/__init__.py | Python | gpl-2.0 | 477 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from django.shortcuts import render, HttpResponse
from django.views.decorators.csrf import csrf_exempt
import hashlib
import xml.etree.ElementTree as ET
import time
from config import TOKEN
# Create your views here.
TOKEN = TOKEN
@csrf_exempt
def index(request):
if request.method == "GET":
global TOKEN
signature = request.GET.get('signature', None)
timestamp = request.GET.get('timestamp', None)
nonce = request.GET.get('nonce', None)
echoStr = request.GET.get('echostr', None)
token = TOKEN
tmpList = [token, timestamp, nonce]
tmpList.sort()
tmp_str = "%s%s%s" % tuple(tmpList)
tmp_str = hashlib.sha1(tmp_str).hexdigest()
if tmp_str == signature:
return HttpResponse(echoStr)
else:
return HttpResponse('Error')
elif request.method == "POST":
xml_msg = request.body
response = HttpResponse(response_msg(xml_msg), content_type="application/xml")
return response
MSG_TYPE_TEXT = "text"
def response_msg(msg):
tree = ET.fromstring(msg)
msg = parse_xml(tree)
res = ""
if msg['MsgType'] == MSG_TYPE_TEXT:
reply_content = "Hello"
res = get_reply_xml(msg, reply_content)
return res
def get_reply_xml(msg, reply_content):
template = '''
<xml>
<ToUserName><![CDATA[%s]]></ToUserName>
<FromUserName><![CDATA[%s]]></FromUserName>
<CreateTime>%s</CreateTime>
<MsgType><![CDATA[%s]]></MsgType>
<Content><![CDATA[%s]]></Content>
</xml>
'''
res = template % (msg['FromUserName'], msg['ToUserName'], str(int(time.time())), 'text', reply_content)
return res
def parse_xml(root_elm):
"""
:param root_elm:
:return: msg dict
"""
msg = {}
if root_elm.tag == 'xml':
for child in root_elm:
msg[child.tag] = child.text
return msg
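# Illustrative input for parse_xml (a typical WeChat text-message payload; the
# field values are made up):
#
#   <xml>
#     <ToUserName><![CDATA[gh_account]]></ToUserName>
#     <FromUserName><![CDATA[openid_of_sender]]></FromUserName>
#     <CreateTime>1440000000</CreateTime>
#     <MsgType><![CDATA[text]]></MsgType>
#     <Content><![CDATA[hello]]></Content>
#   </xml>
#
# parse_xml(ET.fromstring(...)) then yields a dict such as
# {'ToUserName': 'gh_account', 'MsgType': 'text', 'Content': 'hello', ...}.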
| bucketzxm/wechat_template | movie/views.py | Python | gpl-3.0 | 2,018 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2012 OpenPlans
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import login_required
from django.views.generic import TemplateView
from .views import DocumentUploadView, DocumentUpdateView
js_info_dict = {
'packages': ('geonode.documents',),
}
urlpatterns = patterns('geonode.documents.views',
url(r'^$',
TemplateView.as_view(template_name='documents/document_list.html'),
name='document_browse'),
url(r'^(?P<docid>\d+)/?$',
'document_detail',
name='document_detail'),
url(r'^(?P<docid>\d+)/download/?$',
'document_download',
name='document_download'),
url(r'^(?P<docid>\d+)/replace$',
login_required(DocumentUpdateView.as_view()),
name="document_replace"),
url(r'^(?P<docid>\d+)/remove$',
'document_remove',
name="document_remove"),
url(r'^upload/?$',
login_required(DocumentUploadView.as_view()),
name='document_upload'),
url(r'^search/?$',
'document_search_page',
name='document_search_page'),
url(r'^(?P<docid>\d+)/metadata$',
'document_metadata',
name='document_metadata'),
)
| boedy1996/SPARC | geonode/documents/urls.py | Python | gpl-3.0 | 2,485 |
"""
Copyright 2014-2021 Vincent Texier <[email protected]>
DuniterPy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
DuniterPy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from typing import List, Dict, Any
from duniterpy.api import bma
from duniterpy.api.client import Client
from duniterpy.documents.peer import Peer, MalformedDocumentError
from duniterpy.documents.ws2p.heads import HeadV2
from itertools import groupby
from duniterpy.key import VerifyingKey
async def get_available_nodes(client: Client) -> List[List[Dict[str, Any]]]:
"""
Get available nodes grouped and sorted by descending blockstamp
Each entry is a list of nodes (HeadV2 instance, inline endpoint list) sharing the same blockstamp:
[
[{"head": HeadV2, "endpoints": [str, ...]}, ...],
[{"head": HeadV2, "endpoints": [str, ...]}, ...],
...
]
You can just select the first endpoint of the first node of the first group to quickly get an available node.
groups = get_available_nodes(client)
first_node_first_endpoint = groups[0][0]["endpoints"][0]
If node is down, you can select another node.
Warning : only nodes with BMAS, BASIC_MERKLED_API, GVA and GVASUB endpoint are selected
and only those endpoints are available in the endpoint list
:param client: Client instance
:return:
"""
# capture heads and peers
heads_response = await client(bma.network.ws2p_heads)
peers_response = await client(bma.network.peers)
# get heads instances from WS2P messages
heads = []
for entry in heads_response["heads"]:
head, _ = HeadV2.from_inline(entry["messageV2"], entry["sigV2"])
heads.append(head)
# sort by blockstamp by descending order
heads = sorted(heads, key=lambda x: x.blockstamp, reverse=True)
# group heads by blockstamp
groups = []
for _, group in groupby(heads, key=lambda x: x.blockstamp):
nodes = []
for head in list(group):
# if head signature not valid...
if VerifyingKey(head.pubkey).verify_ws2p_head(head) is False:
# skip this node
continue
bma_peers = [
bma_peer
for bma_peer in peers_response["peers"]
if bma_peer["pubkey"] == head.pubkey
]
# if no peer found...
if len(bma_peers) == 0:
# skip this node
continue
bma_peer = bma_peers[0]
try:
peer = Peer.from_bma(bma_peer)
# if bad peer... (mostly bad formatted endpoints)
except MalformedDocumentError:
# skip this node
continue
# set signature in Document
peer.signatures = [bma_peer["signature"]]
# if peer signature not valid
if VerifyingKey(head.pubkey).verify_document(peer) is False:
# skip this node
continue
# filter endpoints to get only BMAS, BASIC_MERKLED_API, GVA or GVASUB
endpoints = [
endpoint
for endpoint in bma_peers[0]["endpoints"]
if endpoint.startswith("BMAS")
or endpoint.startswith("BASIC_MERKLED_API")
or endpoint.startswith("GVA")
or endpoint.startswith("GVASUB")
]
if len(endpoints) == 0:
# skip this node
continue
# add node to group nodes
nodes.append({"head": head, "endpoints": endpoints})
# if nodes in group...
if len(nodes) > 0:
# add group to groups
groups.append(nodes)
return groups
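# Illustrative usage inside an async function (the endpoint address is hypothetical):
#
#     client = Client("BMAS g1.duniter.org 443")
#     groups = await get_available_nodes(client)
#     first_endpoint = groups[0][0]["endpoints"][0]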
| duniter/duniter-python-api | duniterpy/helpers/network.py | Python | gpl-3.0 | 4,268 |
from core.forms import BaseDatasetForm
class GenBankFastaForm(BaseDatasetForm):
"""The methods needed by this class are very basic."""
pass
| carlosp420/VoSeq | genbank_fasta/forms.py | Python | bsd-3-clause | 150 |
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
| designcc/django-ccstraps | ccstraps/views.py | Python | bsd-3-clause | 178 |
# C# generator
boilerplate = """\
using System;
using System.ComponentModel.DataAnnotations;
using ArkeIndustries.RequestServer;
using ArkeIndustries.RequestServer.DataAnnotations;
#pragma warning disable 0649
namespace ArkeIndustries.Starfall.Api {
public sealed class ServerId {
% for server in ast["servers"]:
public const int ${server["name"]} = ${2**loop.index};
% endfor
}
public sealed class NotificationType {
% for notif in ast["notifications"]:
public static ushort ${notif["name"]} { get; } = ${loop.index};
% endfor
}
public sealed class ResponseCode : ArkeIndustries.RequestServer.ResponseCode {
% for resp in ast["response_codes"]:
public static ushort ${resp["name"]} { get; } = ${loop.index + 100};
% endfor
}
% for cat in ast["categories"]:
namespace ${cat["name"]} {
% for meth in cat["methods"]:
[MessageDefinition(ServerId = ${meth["server_ids"]}, AuthenticationRequired = ${"auth" in meth["meta"] and meth["meta"]["auth"]})]
% for attr in meth["attrs"]:
${attr}
% endfor
public partial class ${meth["name"]} : ${meth["base_class"]} {
public override ushort Category { get; } = ${loop.parent.index};
public override ushort Method { get; } = ${loop.index};
% if meth["is_list"]:
% for inp in meth["inputs"]:
[MessageInput(Index = ${meth["name"]}.InputStartIndex + ${loop.index})]
% for attr in inp["attrs"]:
${attr}
% endfor
public ${inp["type"]} ${inp["name"]} { get; set; }
% endfor
public class ${meth["list_class_name"]} {
% for out in meth["outputs"]:
[MessageOutput(Index = ${loop.index})]
% for attr in out["attrs"]:
${attr}
% endfor
public ${out["type"]} ${out["name"]} { get; set; }
% endfor
}
% else:
% for inp in meth["inputs"]:
[MessageInput(Index = ${loop.index})]
% for attr in inp["attrs"]:
${attr}
% endfor
public ${inp["type"]} ${inp["name"]} { get; set; }
% endfor
% for out in meth["outputs"]:
[MessageOutput(Index = ${loop.index})]
% for attr in out["attrs"]:
${attr}
% endfor
public ${out["type"]} ${out["name"]} { get; set; }
% endfor
% endif
}
% endfor
}
% endfor
}
"""
builtins = {
"u64": "ulong",
"i64": "long",
"u32": "uint",
"i32": "int",
"u16": "ushort",
"i16": "short",
"u8": "byte",
"i8": "sbyte"
}
def trans(ast, typename):
if typename in builtins:
return builtins[typename]
else:
for t in ast["types"]:
if typename == t["name"]:
return trans(ast, t["type"])
# well, we tried. assume it's meaningful for now
return typename
def process_types(ast):
"Replace references to types with their correct type, and replace with the C# typename"
# hilariously bad exponential algorithm
while True:
touched_any = False
for t1 in ast["types"]:
t1["val"] = trans(ast, t1["val"])
for t2 in ast["types"]:
if t2["val"] == t1["name"]:
t2["val"] = trans(ast, t1["val"])
touched_any = True
break
if not touched_any:
break
def propagate_server_attrs(ast):
for cat in ast["categories"]:
for meth in cat["methods"]:
meth["meta"] = {}
for prop in cat["properties"]:
for meth in cat["methods"]:
if prop["name"] not in meth["meta"]:
if prop["name"] == "server":
if "server" not in meth["meta"]:
meth["meta"]["server"] = []
meth["meta"]["server"].append(prop["val"])
else:
meth["meta"][prop["name"]] = prop["val"]
def sattr(a):
if a["plain"] is not None:
return a["plain"]
else:
if len(a["args"]) != 0:
return "%s(%s)" % (a["name"], ", ".join(sattr(a) for a in a["args"] if len(a) != 0))
else:
return "%s = %s" % (a["name"], a["val"])
def normalize_methods(ast):
"Simplify the complex property structure of methods and format the fields of the classes"
for cat in ast["categories"]:
for meth in cat["methods"]:
if any(map(lambda a: a["name"] == "List", meth["attributes"])):
lattr = [m for m in meth["attributes"] if m["name"] == "List"][0]
# simplify list attributes
meth["list_class_name"] = [n for n in lattr["args"] if "name" in n and n["name"] == "Class"][0]["val"].replace('"', '')
meth["base_class"] = "ListQueryMessageHandler<Objects.Database.Context, %s.%s>" % (meth["name"], meth["list_class_name"])
meth["is_list"] = True
else:
meth["base_class"] = "MessageHandler<Objects.Database.Context>"
meth["server_ids"] = " | ".join("ServerId.%s" % s for s in meth["meta"]["server"])
meth["attrs"] = ["[%s]" % sattr(a) for a in meth["attributes"] if len(a) != 0 and a["name"] != "List"]
inps = [m for m in meth["properties"] if m["name"] == "in" ]
outs = [m for m in meth["properties"] if m["name"] == "out"]
meth["inputs"] = []
meth["outputs"] = []
if inps:
for inp in inps[0]["type"]["obj"]:
meth["inputs"].append({
"name": inp["name"],
"type": trans(ast, inp["type"]),
"attrs": ["[%s]" % sattr(a) for a in inp["attributes"]]
})
if outs:
for out in outs[0]["type"]["obj"]:
meth["outputs"].append({
"name": out["name"],
"type": trans(ast, out["type"]),
"attrs": ["[%s]" % sattr(a) for a in out["attributes"]]
})
def generate(ast):
from mako.template import Template
process_types(ast)
propagate_server_attrs(ast)
normalize_methods(ast)
return Template(boilerplate).render(ast=ast)
| cmr/Protogen | generators/cs.py | Python | mit | 6,570 |
from django.db import models
from brain.utils import text_snippet_start
class Classification(models.Model):
created = models.DateTimeField(auto_now_add=True)
last_update = models.DateTimeField(auto_now=True)
tag = models.TextField(editable=False, null=True, blank=True)
owner = models.CharField(
max_length=100, db_index=True, editable=False, null=True, blank=True)
name = models.CharField(max_length=100, db_index=True)
def __unicode__(self):
return self.name
class Meta:
verbose_name = 'Classification'
verbose_name_plural = 'Classifications'
class Data(models.Model):
created = models.DateTimeField(auto_now_add=True)
last_update = models.DateTimeField(auto_now=True)
tag = models.TextField(editable=False, null=True, blank=True)
owner = models.CharField(
max_length=100, db_index=True, editable=False, null=True, blank=True)
classification = models.ManyToManyField(Classification, related_name='my_data')
content = models.TextField(null=True, blank=True, db_index=True)
def __unicode__(self):
return self.get_preview()
def get_preview(self):
return text_snippet_start(self.content, max_len=10)
get_preview.short_description = 'Preview'
class Meta:
verbose_name = 'Data'
verbose_name_plural = 'Data'
| NuChwezi/nubrain | brain/models.py | Python | mit | 1,354 |
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/topics/items.html
from scrapy.item import Item, Field
class ElUniversoItem(Item):
# define the fields for your item here like:
# name = Field()
# title = Field()
content = Field()
pass
| j3nnn1/topic_model | scrapy/el_universo/el_universo/items.py | Python | mit | 304 |
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("DummyClassifier" , "digits" , "sqlite")
| antoinecarme/sklearn2sql_heroku | tests/classification/digits/ws_digits_DummyClassifier_sqlite_code_gen.py | Python | bsd-3-clause | 137 |
###########################################################################
#
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
# Ad Group SDF Schema for API version 4.2
SDF_AdGroup_Schema = [{
"type": "STRING",
"name": "Ad_Group_Id",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Line_Item_Id",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Name",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Status",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Video_Ad_Format",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Max_Cost",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Popular_Videos_Bid_Adjustment",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Keyword_Targeting_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Keyword_Targeting_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Category_Targeting_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Category_Targeting_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_YouTube_Channels_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_YouTube_Channels_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_YouTube_Videos_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_YouTube_Videos_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_URLs_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_URLs_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_Apps_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_Apps_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_App_Collections_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Placement_Targeting_App_Collections_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Demographic_Targeting_Gender",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Demographic_Targeting_Age",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Demographic_Targeting_Household_Income",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Demographic_Targeting_Parental_Status",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Audience_Targeting_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Audience_Targeting_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Affinity_And_In_Market_Targeting_Include",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Affinity_And_In_Market_Targeting_Exclude",
"mode": "NULLABLE"
}, {
"type": "STRING",
"name": "Custom_List_Targeting",
"mode": "NULLABLE"
}]
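# Sketch (assumes google-cloud-bigquery is available): the dict entries above map
# one-to-one onto BigQuery SchemaField objects, e.g.
#
#     from google.cloud import bigquery
#     fields = [
#         bigquery.SchemaField(f["name"], f["type"], mode=f["mode"])
#         for f in SDF_AdGroup_Schema
#     ]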
| google/orchestra | orchestra/google/marketing_platform/utils/schema/sdf/v4_2/AdGroup.py | Python | apache-2.0 | 3,641 |
from django.conf.urls import include, patterns, url
from django.views.decorators.cache import never_cache
from . import views
services_patterns = patterns('',
url('^paypal$', never_cache(views.paypal), name='amo.paypal'),
)
urlpatterns = patterns('',
('', include(services_patterns)),
)
| wagnerand/zamboni | apps/paypal/urls.py | Python | bsd-3-clause | 298 |
import math
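# TS-SS (Triangle's Area Similarity times Sector's Area Similarity) scores a pair
# of vectors by multiplying the area of the triangle they span with the area of
# the sector swept between them, so both the angle and the magnitude difference
# contribute to the result. The helpers below implement those geometric pieces.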
def Cosine(vec1, vec2) :
result = InnerProduct(vec1,vec2) / (VectorSize(vec1) * VectorSize(vec2))
return result
def VectorSize(vec) :
return math.sqrt(sum(math.pow(v,2) for v in vec))
def InnerProduct(vec1, vec2) :
return sum(v1*v2 for v1,v2 in zip(vec1,vec2))
def Euclidean(vec1, vec2) :
return math.sqrt(sum(math.pow((v1-v2),2) for v1,v2 in zip(vec1, vec2)))
def Theta(vec1, vec2) :
    # Angle in degrees (plus a 10-degree offset so parallel vectors do not
    # collapse the areas to zero); Triangle() and Sector() expect degrees.
    return math.degrees(math.acos(Cosine(vec1, vec2))) + 10
def Triangle(vec1, vec2) :
theta = math.radians(Theta(vec1,vec2))
return (VectorSize(vec1) * VectorSize(vec2) * math.sin(theta)) / 2
def Magnitude_Difference(vec1, vec2) :
return abs(VectorSize(vec1) - VectorSize(vec2))
def Sector(vec1, vec2) :
ED = Euclidean(vec1, vec2)
MD = Magnitude_Difference(vec1, vec2)
theta = Theta(vec1, vec2)
return math.pi * math.pow((ED+MD),2) * theta/360
def TS_SS(vec1, vec2) :
return Triangle(vec1, vec2) * Sector(vec1, vec2)
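# TS-SS multiplies two geometric quantities: the triangle area spanned by the two
# vectors (sensitive to both the angle between them and their magnitudes) and the
# sector area built from the Euclidean distance and the magnitude difference, so
# the score approaches 0 only when the vectors agree in direction and length.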
vec1 = [1,2]
vec2 = [2,4]
print(Euclidean(vec1,vec2))
print(Cosine(vec1,vec2))
print(TS_SS(vec1,vec2)) | taki0112/Vector_Similarity | python/TS_SS/Vector_Similarity.py | Python | mit | 1,079 |
"""This is an TensorFLow implementation of AlexNet by Alex Krizhevsky at all.
Paper:
(http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf)
@original author: Frederik Kratzert (contact: f.kratzert(at)gmail.com)
"""
import tensorflow as tf
import numpy as np
class AlexNet(object):
"""Implementation of the AlexNet."""
def __init__(self, x, keep_prob, num_classes, skip_layer,
weights_path='DEFAULT'):
"""Create the graph of the AlexNet model.
Args:
x: Placeholder for the input tensor.
keep_prob: Dropout probability.
num_classes: Number of classes in the dataset.
skip_layer: List of names of the layer, that get trained from
scratch
weights_path: Complete path to the pretrained weight file, if it
isn't in the same folder as this code
"""
# Parse input arguments into class variables
self.X = x
self.NUM_CLASSES = num_classes
self.KEEP_PROB = keep_prob
self.SKIP_LAYER = skip_layer
if weights_path == 'DEFAULT':
self.WEIGHTS_PATH = 'bvlc_alexnet.npy'
else:
self.WEIGHTS_PATH = weights_path
# Call the create function to build the computational graph of AlexNet
self.create()
def create(self):
"""Create the network graph."""
# 1st Layer: Conv (w ReLu) -> Lrn -> Pool
conv1 = conv(self.X, 11, 11, 96, 4, 4, padding='VALID', name='conv1')
norm1 = lrn(conv1, 2, 2e-05, 0.75, name='norm1')
pool1 = max_pool(norm1, 3, 3, 2, 2, padding='VALID', name='pool1')
# 2nd Layer: Conv (w ReLu) -> Lrn -> Pool with 2 groups
conv2 = conv(pool1, 5, 5, 256, 1, 1, groups=2, name='conv2')
norm2 = lrn(conv2, 2, 2e-05, 0.75, name='norm2')
pool2 = max_pool(norm2, 3, 3, 2, 2, padding='VALID', name='pool2')
# 3rd Layer: Conv (w ReLu)
conv3 = conv(pool2, 3, 3, 384, 1, 1, name='conv3')
        # 4th Layer: Conv (w ReLu) split into two groups
        conv4 = conv(conv3, 3, 3, 384, 1, 1, groups=2, name='conv4')
        # 5th Layer: Conv (w ReLu) -> Pool split into two groups
conv5 = conv(conv4, 3, 3, 256, 1, 1, groups=2, name='conv5')
pool5 = max_pool(conv5, 3, 3, 2, 2, padding='VALID', name='pool5')
# 6th Layer: Flatten -> FC (w ReLu) -> Dropout
flattened = tf.reshape(pool5, [-1, 6*6*256])
fc6 = fc(flattened, 6*6*256, 4096, name='fc6')
dropout6 = dropout(fc6, self.KEEP_PROB)
# 7th Layer: FC (w ReLu) -> Dropout
fc7 = fc(dropout6, 4096, 4096, name='fc7')
dropout7 = dropout(fc7, self.KEEP_PROB)
# 8th Layer: FC and return unscaled activations
self.fc8 = fc(dropout7, 4096, self.NUM_CLASSES, relu=False, name='fc8')
def load_initial_weights(self, session):
"""Load weights from file into network.
As the weights from http://www.cs.toronto.edu/~guerzhoy/tf_alexnet/
come as a dict of lists (e.g. weights['conv1'] is a list) and not as
dict of dicts (e.g. weights['conv1'] is a dict with keys 'weights' &
'biases') we need a special load function
"""
# Load the weights into memory
weights_dict = np.load(self.WEIGHTS_PATH, encoding='bytes').item()
# Loop over all layer names stored in the weights dict
for op_name in weights_dict:
# Check if layer should be trained from scratch
if op_name not in self.SKIP_LAYER:
with tf.variable_scope(op_name, reuse=True):
# Assign weights/biases to their corresponding tf variable
for data in weights_dict[op_name]:
# Biases
if len(data.shape) == 1:
var = tf.get_variable('biases', trainable=False)
session.run(var.assign(data))
# Weights
else:
var = tf.get_variable('weights', trainable=False)
session.run(var.assign(data))
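# Illustrative usage sketch (hypothetical variable names; assumes TensorFlow 1.x,
# a local copy of `bvlc_alexnet.npy`, and a batch of 227x227x3 images in `images`):
#
#   x = tf.placeholder(tf.float32, [None, 227, 227, 3])
#   keep_prob = tf.placeholder(tf.float32)
#   model = AlexNet(x, keep_prob, num_classes=1000, skip_layer=[])
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       model.load_initial_weights(sess)
#       scores = sess.run(model.fc8, feed_dict={x: images, keep_prob: 1.0})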
def conv(x, filter_height, filter_width, num_filters, stride_y, stride_x, name,
padding='SAME', groups=1):
"""Create a convolution layer.
Adapted from: https://github.com/ethereon/caffe-tensorflow
"""
# Get number of input channels
input_channels = int(x.get_shape()[-1])
# Create lambda function for the convolution
convolve = lambda i, k: tf.nn.conv2d(i, k,
strides=[1, stride_y, stride_x, 1],
padding=padding)
with tf.variable_scope(name) as scope:
# Create tf variables for the weights and biases of the conv layer
weights = tf.get_variable('weights', shape=[filter_height,
filter_width,
input_channels/groups,
num_filters])
biases = tf.get_variable('biases', shape=[num_filters])
if groups == 1:
conv = convolve(x, weights)
        # In the case of multiple groups, split inputs & weights and
        # convolve each group separately before concatenating the results.
else:
# Split input and weights and convolve them separately
input_groups = tf.split(axis=3, num_or_size_splits=groups, value=x)
weight_groups = tf.split(axis=3, num_or_size_splits=groups,
value=weights)
output_groups = [convolve(i, k) for i, k in zip(input_groups, weight_groups)]
# Concat the convolved output together again
conv = tf.concat(axis=3, values=output_groups)
# Add biases
bias = tf.reshape(tf.nn.bias_add(conv, biases), tf.shape(conv))
# Apply relu function
relu = tf.nn.relu(bias, name=scope.name)
return relu
def fc(x, num_in, num_out, name, relu=True):
"""Create a fully connected layer."""
with tf.variable_scope(name) as scope:
# Create tf variables for the weights and biases
weights = tf.get_variable('weights', shape=[num_in, num_out],
trainable=True)
biases = tf.get_variable('biases', [num_out], trainable=True)
# Matrix multiply weights and inputs and add bias
act = tf.nn.xw_plus_b(x, weights, biases, name=scope.name)
if relu:
# Apply ReLu non linearity
relu = tf.nn.relu(act)
return relu
else:
return act
def max_pool(x, filter_height, filter_width, stride_y, stride_x, name,
padding='SAME'):
"""Create a max pooling layer."""
return tf.nn.max_pool(x, ksize=[1, filter_height, filter_width, 1],
strides=[1, stride_y, stride_x, 1],
padding=padding, name=name)
def lrn(x, radius, alpha, beta, name, bias=1.0):
"""Create a local response normalization layer."""
return tf.nn.local_response_normalization(x, depth_radius=radius,
alpha=alpha, beta=beta,
bias=bias, name=name)
def dropout(x, keep_prob):
"""Create a dropout layer."""
return tf.nn.dropout(x, keep_prob)
| JasonHanG/tensor-gallery | alexNet-finetune/alexnet.py | Python | apache-2.0 | 7,368 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""HTML utilities."""
from __future__ import absolute_import
from HTMLParser import HTMLParser
from six import iteritems
from werkzeug.local import LocalProxy
from invenio.base.globals import cfg
from invenio.utils.text import indent_text, encode_for_xml
default_ln = lambda ln: cfg['CFG_SITE_LANG'] if ln is None else ln
import re
import cgi
import os
import json
try:
from BeautifulSoup import BeautifulSoup
CFG_BEAUTIFULSOUP_INSTALLED = True
except ImportError:
CFG_BEAUTIFULSOUP_INSTALLED = False
try:
import tidy
CFG_TIDY_INSTALLED = True
except ImportError:
CFG_TIDY_INSTALLED = False
# List of allowed tags (tags that won't create any XSS risk)
CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST = ('a',
'p', 'br', 'blockquote',
'strong', 'b', 'u', 'i', 'em',
'ul', 'ol', 'li', 'sub', 'sup', 'div', 'strike')
# List of allowed attributes. Be cautious, some attributes may be risky:
# <p style="background: url(myxss_suite.js)">
CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST = ('href', 'name', 'class')
## precompile some often-used regexp for speed reasons:
RE_HTML = re.compile("(?s)<[^>]*>|&#?\w+;")
RE_HTML_WITHOUT_ESCAPED_CHARS = re.compile("(?s)<[^>]*>")
# url validation regex
regex_url = re.compile(r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain...
r'localhost|' #localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
def nmtoken_from_string(text):
"""
Returns a Nmtoken from a string.
It is useful to produce XHTML valid values for the 'name'
attribute of an anchor.
    CAUTION: the function is not injective: 2 different texts might lead to
the same result. This is improbable on a single page.
Nmtoken is the type that is a mixture of characters supported in
attributes such as 'name' in HTML 'a' tag. For example,
<a name="Articles%20%26%20Preprints"> should be tranformed to
<a name="Articles372037263720Preprints"> using this function.
http://www.w3.org/TR/2000/REC-xml-20001006#NT-Nmtoken
Also note that this function filters more characters than
specified by the definition of Nmtoken ('CombiningChar' and
'Extender' charsets are filtered out).
"""
text = text.replace('-', '--')
return ''.join( [( ((not char.isalnum() and not char in ['.', '-', '_', ':']) and str(ord(char))) or char)
for char in text] )
def escape_html(text, escape_quotes=False):
"""Escape all HTML tags, avoiding XSS attacks.
    < => &lt;
    > => &gt;
    & => &amp;
    @param text: text to be escaped from HTML tags
    @param escape_quotes: if True, escape any quote mark to its HTML entity:
                          " => &quot;
                          ' => &#39;
    """
    text = text.replace('&', '&amp;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    if escape_quotes:
        text = text.replace('"', '&quot;')
        text = text.replace("'", '&#39;')
return text
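# For example, escape_html('<a href="#">x & y</a>') returns
# '&lt;a href="#"&gt;x &amp; y&lt;/a&gt;' (quotes are kept unless
# escape_quotes=True).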
CFG_JS_CHARS_MAPPINGS = {
'\\': '\\\\',
"'": "\\'",
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
'\v': '\\v',
}
for i in range(0x20):
CFG_JS_CHARS_MAPPINGS.setdefault(chr(i), '\\u%04x' % (i,))
for i in (0x2028, 0x2029):
CFG_JS_CHARS_MAPPINGS.setdefault(unichr(i), '\\u%04x' % (i,))
RE_ESCAPE_JS_CHARS = re.compile(u'''[\\x00-\\x1f\\\\"\\\\'\\b\\f\\n\\r\\t\\v\u2028\u2029]''')
RE_CLOSING_SCRIPT_TAG = re.compile('</script>', re.IGNORECASE)
def escape_javascript_string(text, escape_for_html=True, escape_quote_for_html=False, escape_CDATA=True, escape_script_tag_with_quote='"'):
"""
Escape text in order to be used as Javascript string in various
    contexts.
Examples::
>>> text = '''"Are you a Munchkin?" asked Dorothy.
"No, but I am their friend"'''
>>> escape_javascript_string(text)
>>> \\""Are you a Munchkin?\\" asked Dorothy.\\n\\"No, but I am their friend\\"'
The returned string can be enclosed either in single or double
quotes delimiters.
    THE FUNCTION ASSUMES THAT YOU HAVE ALREADY WASHED THE STRING FROM
    UNSAFE CONTENT, according to the context you plan to use the
    string. The function will just make sure that the string will not
    break your Javascript/HTML code/markup.
If you plan to include the string inside the body of an HTML page,
you will probably want to set C{escape_for_html} to True, in order
    to produce XHTML-valid pages when the input string contains
    characters such as &lt;, &gt; and &amp;.
Furthermore if you plan to include the string as part of a tag
    attribute (for eg. <a href="#" onclick="foo&quot;bar"), you might
want to set C{escape_quote_for_html} to True.
If you plan to include the string inside the body of an HTML page,
enclosed by CDATA delimiters, then you would *not* need to escape
    HTML tags. Using CDATA delimiters makes it possible to include Javascript
strings meant to refer to HTML tags (eg. in case you would like to
manipulate the DOM tree to add new nodes to the page), which would
not be possible when escaping the HTML. For eg.:
/*<![CDATA[*/
document.getElementById('foo').innerHTML = '<p>bar</p>'
/*]]>*/
In this case you will probably want to set C{escape_CDATA} to True
in order to produce an XHTML-valid document, in case a closing
    CDATA delimiter is in your input string. Parameter C{escape_CDATA}
is not considered when C{escape_for_html} is set to True.
Note that CDATA delimiters might be automatically added by the
browser, based on the content-type used to serve the page.
When C{escape_for_html} is set to False, whatever option is chosen
for C{escape_CDATA}, the string must not contain a '</script>' tag
(apparently...). The only option to keep this '</script>' tag (if
    you need it) is to split it, which requires knowing which quote
    delimiter you plan to use. For eg:
Examples::
>>> text = '''foo</script>bar'''
>>> val = escape_javascript_string(text, escape_for_html=False, escape_script_tag_with_quote='"')
>>> 'foo</scr"+"ipt>bar'
>>> mycode = '''alert("%s")''' % val
C{escape_script_tag_with_quote} is not considered when
C{escape_for_html} is set to True.
If you are planning to return the string as part of a pure
Javascript document, then you should in principle set both
C{escape_for_html} and C{escape_CDATA} to False, and
C{escape_script_tag_with_quote} to None.
@param text: string to be escaped
@param escape_for_html: if True, also escape input for HTML
@param escape_CDATA: if True, escape closing CDATA tags (when C{escape_for_html} is False)
    @param escape_script_tag_with_quote: which quote will be used to delimit your string, in case you must wash, but keep, C{</script>} tag (when C{escape_for_html} is False)
"""
if escape_quote_for_html:
text = text.replace('"', '"')
if escape_for_html:
text = cgi.escape(text)
elif escape_CDATA:
text = text.replace(']]>', ']]]]><![CDATA[>')
text = json.dumps(text)[1:-1].replace("'", "\\'")
if not escape_for_html and escape_script_tag_with_quote:
text = RE_CLOSING_SCRIPT_TAG.sub('''</scr%(q)s+%(q)sipt>''' % {'q': escape_script_tag_with_quote}, text)
return text
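# Illustrative use (hypothetical `user_text`): embedding a user-supplied value in
# a double-quoted inline event handler, HTML-escaping it and its quote marks so
# the surrounding attribute and page markup stay valid.
#
#   washed = escape_javascript_string(user_text, escape_for_html=True,
#                                     escape_quote_for_html=True)
#   link = '<a href="#" onclick="alert(\'%s\');">click</a>' % washed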
class HTMLWasher(HTMLParser):
"""
Creates a washer for HTML, avoiding XSS attacks. See wash function for
details on parameters.
Usage::
from invenio.utils.html import HTMLWasher
washer = HTMLWasher()
escaped_text = washer.wash(unescaped_text)
Examples::
        washer.wash('Spam and <b><blink>eggs</blink></b>')
        => 'Spam and <b>eggs</b>'
        washer.wash('Spam and <b><blink>eggs</blink></b>', True)
        => 'Spam and <b>&lt;blink&gt;eggs&lt;/blink&gt;</b>'
        washer.wash('Spam and <b><a href="python.org">eggs</u></b>')
        => 'Spam and <b><a href="python.org">eggs</a></b>'
        washer.wash('Spam and <b><a href="javascript:xss();">eggs</a></b>')
        => 'Spam and <b><a href="">eggs</a></b>'
        washer.wash('Spam and <b><a href="jaVas cRipt:xss();">poilu</a></b>')
        => 'Spam and <b><a href="">poilu</a></b>'
"""
silent = False
def __init__(self):
""" Constructor; initializes washer """
HTMLParser.__init__(self)
self.result = ''
self.nb = 0
self.previous_nbs = []
self.previous_type_lists = []
self.url = ''
self.render_unallowed_tags = False
self.allowed_tag_whitelist = \
CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST
self.allowed_attribute_whitelist = \
CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST
# javascript:
self.re_js = re.compile( ".*(j|j|J)"\
"\s*(a|a|A)"\
"\s*(v|v|V)"\
"\s*(a|a|A)"\
"\s*(s|s|S)"\
"\s*(c|c|C)"\
"\s*(r|r|R)"\
"\s*(i|Ã|I)"\
"\s*(p|p|P)"\
"\s*(t|p|T)"\
"\s*(:|:).*", re.IGNORECASE | re.DOTALL)
# vbscript:
self.re_vb = re.compile( ".*(v|v|V)"\
"\s*(b|b|B)"\
"\s*(s|s|S)"\
"\s*(c|c|C)"\
"\s*(r|r|R)"\
"\s*(i|Ã|I)"\
"\s*(p|p|P)"\
"\s*(t|p|T)"\
"\s*(:|:).*", re.IGNORECASE | re.DOTALL)
def wash(self, html_buffer,
render_unallowed_tags=False,
allowed_tag_whitelist=CFG_HTML_BUFFER_ALLOWED_TAG_WHITELIST,
automatic_link_transformation=False,
allowed_attribute_whitelist=\
CFG_HTML_BUFFER_ALLOWED_ATTRIBUTE_WHITELIST):
"""
Wash HTML buffer, escaping XSS attacks.
@param html_buffer: text to escape
@param render_unallowed_tags: if True, print unallowed tags escaping
< and >. Else, only print content of unallowed tags.
@param allowed_tag_whitelist: list of allowed tags
@param allowed_attribute_whitelist: list of allowed attributes
"""
self.reset()
self.result = ''
self.nb = 0
self.previous_nbs = []
self.previous_type_lists = []
self.url = ''
self.render_unallowed_tags = render_unallowed_tags
self.automatic_link_transformation = automatic_link_transformation
self.allowed_tag_whitelist = allowed_tag_whitelist
self.allowed_attribute_whitelist = allowed_attribute_whitelist
self.feed(html_buffer)
self.close()
return self.result
def handle_starttag(self, tag, attrs):
"""Function called for new opening tags"""
if tag.lower() in self.allowed_tag_whitelist:
self.result += '<' + tag
for (attr, value) in attrs:
if attr.lower() in self.allowed_attribute_whitelist:
self.result += ' %s="%s"' % \
(attr, self.handle_attribute_value(value))
self.result += '>'
else:
if self.render_unallowed_tags:
self.result += '<' + cgi.escape(tag)
for (attr, value) in attrs:
self.result += ' %s="%s"' % \
(attr, cgi.escape(value, True))
self.result += '>'
elif tag == 'style' or tag == 'script':
# In that case we want to remove content too
self.silent = True
def handle_data(self, data):
"""Function called for text nodes"""
if not self.silent:
possible_urls = re.findall(r'(https?://[\w\d:#%/;$()~_?\-=\\\.&]*)', data)
# validate possible urls
# we'll transform them just in case
# they are valid.
if possible_urls and self.automatic_link_transformation:
for url in possible_urls:
if regex_url.search(url):
transformed_url = '<a href="%s">%s</a>' % (url, url)
data = data.replace(url, transformed_url)
self.result += data
else:
self.result += cgi.escape(data, True)
def handle_endtag(self, tag):
"""Function called for ending of tags"""
if tag.lower() in self.allowed_tag_whitelist:
self.result += '</' + tag + '>'
else:
if self.render_unallowed_tags:
self.result += '</' + cgi.escape(tag) + '>'
if tag == 'style' or tag == 'script':
self.silent = False
def handle_startendtag(self, tag, attrs):
"""Function called for empty tags (e.g. <br />)"""
if tag.lower() in self.allowed_tag_whitelist:
self.result += '<' + tag
for (attr, value) in attrs:
if attr.lower() in self.allowed_attribute_whitelist:
self.result += ' %s="%s"' % \
(attr, self.handle_attribute_value(value))
self.result += ' />'
else:
if self.render_unallowed_tags:
self.result += '<' + cgi.escape(tag)
for (attr, value) in attrs:
self.result += ' %s="%s"' % \
(attr, cgi.escape(value, True))
self.result += ' />'
def handle_attribute_value(self, value):
"""Check attribute. Especially designed for avoiding URLs in the form:
javascript:myXSSFunction();"""
if self.re_js.match(value) or self.re_vb.match(value):
return ''
return value
def handle_charref(self, name):
"""Process character references of the form "&#ref;". Return it as it is."""
self.result += '&#' + name + ';'
def handle_entityref(self, name):
"""Process a general entity reference of the form "&name;".
Return it as it is."""
self.result += '&' + name + ';'
def tidy_html(html_buffer, cleaning_lib='utidylib'):
"""
Tidy up the input HTML using one of the installed cleaning
libraries.
@param html_buffer: the input HTML to clean up
@type html_buffer: string
@param cleaning_lib: chose the preferred library to clean the HTML. One of:
- utidylib
- beautifulsoup
@return: a cleaned version of the input HTML
@note: requires uTidylib or BeautifulSoup to be installed. If the chosen library is missing, the input X{html_buffer} is returned I{as is}.
"""
if CFG_TIDY_INSTALLED and cleaning_lib == 'utidylib':
options = dict(output_xhtml=1,
show_body_only=1,
merge_divs=0,
wrap=0)
try:
output = str(tidy.parseString(html_buffer, **options))
except:
output = html_buffer
elif CFG_BEAUTIFULSOUP_INSTALLED and cleaning_lib == 'beautifulsoup':
try:
output = str(BeautifulSoup(html_buffer).prettify())
except:
output = html_buffer
else:
output = html_buffer
return output
def get_mathjax_header(https=False):
"""
Return the snippet of HTML code to put in HTML HEAD tag, in order to
enable MathJax support.
@param https: when using the CDN, whether to use the HTTPS URL rather
than the HTTP one.
@type https: bool
    @note: with new releases of MathJax, update this function together with
$MJV variable in the root Makefile.am
"""
if cfg['CFG_MATHJAX_HOSTING'].lower() == 'cdn':
if https:
mathjax_path = "https://d3eoax9i5htok0.cloudfront.net/mathjax/2.1-latest"
else:
mathjax_path = "http://cdn.mathjax.org/mathjax/2.1-latest"
else:
mathjax_path = "/MathJax"
return """<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {inlineMath: [['$','$']],
processEscapes: true},
showProcessingMessages: false,
messageStyle: "none"
});
</script>
<script src="%(mathjax_path)s/MathJax.js?config=TeX-AMS_HTML" type="text/javascript">
</script>""" % {
'mathjax_path': mathjax_path
}
def is_html_text_editor_installed():
"""
Returns True if the wysiwyg editor (CKeditor) is installed
"""
return os.path.exists(os.path.join(cfg['CFG_WEBDIR'], 'ckeditor', 'ckeditor.js'))
ckeditor_available = LocalProxy(is_html_text_editor_installed)
def get_html_text_editor(name, id=None, content='', textual_content=None, width='300px', height='200px',
enabled=True, file_upload_url=None, toolbar_set="Basic",
custom_configurations_path='/js/ckeditor/invenio-ckeditor-config.js',
ln=None):
"""
Returns a wysiwyg editor (CKEditor) to embed in html pages.
Fall back to a simple textarea when the library is not installed,
or when the user's browser is not compatible with the editor, or
when 'enable' is False, or when javascript is not enabled.
NOTE that the output also contains a hidden field named
'editor_type' that contains the kind of editor used, 'textarea' or
'ckeditor'.
Based on 'editor_type' you might want to take different actions,
    like replacing CRLF with <br/> when editor_type equals
    'textarea', but not when editor_type equals 'ckeditor'.
@param name: *str* the name attribute of the returned editor
@param id: *str* the id attribute of the returned editor (when
applicable)
@param content: *str* the default content of the editor.
@param textual_content: *str* a content formatted for the case where the
wysiwyg editor is not available for user. When not
specified, use value of 'content'
@param width: *str* width of the editor in an html compatible unit:
Eg: '400px', '50%'.
@param height: *str* height of the editor in an html compatible unit:
Eg: '400px', '50%'.
    @param enabled: *bool* if the wysiwyg editor is returned (True) or if a
                    simple textarea is returned (False)
@param file_upload_url: *str* the URL used to upload new files via the
editor upload panel. You have to implement the
handler for your own use. The URL handler will get
form variables 'File' as POST for the uploaded file,
and 'Type' as GET for the type of file ('file',
'image', 'flash', 'media')
When value is not given, the file upload is disabled.
@param toolbar_set: *str* the name of the toolbar layout to
use. CKeditor comes by default with 'Basic' and
'Default'. To define other sets, customize the
config file in
/opt/cds-invenio/var/www/ckeditor/invenio-ckconfig.js
@param custom_configurations_path: *str* value for the CKeditor config
variable 'CustomConfigurationsPath',
which allows to specify the path of a
file that contains a custom configuration
for the editor. The path is relative to
/opt/invenio/var/www/
@return: the HTML markup of the editor
"""
ln = default_ln(ln)
if textual_content is None:
textual_content = content
editor = ''
if enabled and ckeditor_available:
# Prepare upload path settings
file_upload_script = ''
if file_upload_url is not None:
file_upload_script = ''',
filebrowserLinkUploadUrl: '%(file_upload_url)s',
filebrowserImageUploadUrl: '%(file_upload_url)s?type=Image',
filebrowserFlashUploadUrl: '%(file_upload_url)s?type=Flash'
''' % {'file_upload_url': file_upload_url}
# Prepare code to instantiate an editor
editor += '''
<script type="text/javascript" language="javascript">//<![CDATA[
/* Load the script only once, or else multiple instance of the editor on the same page will not work */
var INVENIO_CKEDITOR_ALREADY_LOADED
if (INVENIO_CKEDITOR_ALREADY_LOADED != 1) {
document.write('<script type="text/javascript" src="%(CFG_SITE_URL)s/ckeditor/ckeditor.js"><\/script>');
INVENIO_CKEDITOR_ALREADY_LOADED = 1;
}
//]]></script>
<input type="hidden" name="editor_type" id="%(id)seditortype" value="textarea" />
<textarea rows="100" cols="80" id="%(id)s" name="%(name)s" style="width:%(width)s;height:%(height)s">%(textual_content)s</textarea>
<textarea rows="100" cols="80" id="%(id)shtmlvalue" name="%(name)shtmlvalue" style="display:none;width:%(width)s;height:%(height)s">%(html_content)s</textarea>
<script type="text/javascript">//<![CDATA[
var CKEDITOR_BASEPATH = '/ckeditor/';
CKEDITOR.replace( '%(name)s',
{customConfig: '%(custom_configurations_path)s',
toolbar: '%(toolbar)s',
width: '%(width)s',
height:'%(height)s',
language: '%(ln)s'
%(file_upload_script)s
});
CKEDITOR.on('instanceReady',
function( evt )
{
/* If CKeditor was correctly loaded, display the nice HTML representation */
var oEditor = evt.editor;
editor_id = oEditor.id
editor_name = oEditor.name
var html_editor = document.getElementById(editor_name + 'htmlvalue');
oEditor.setData(html_editor.value);
var editor_type_field = document.getElementById(editor_name + 'editortype');
editor_type_field.value = 'ckeditor';
var writer = oEditor.dataProcessor.writer;
writer.indentationChars = ''; /*Do not indent source code with tabs*/
oEditor.resetDirty();
/* Workaround: http://dev.ckeditor.com/ticket/3674 */
evt.editor.on( 'contentDom', function( ev )
{
ev.removeListener();
evt.editor.resetDirty();
} );
/* End workaround */
})
//]]></script>
''' % \
{'textual_content': cgi.escape(textual_content),
'html_content': content,
'width': width,
'height': height,
'name': name,
'id': id or name,
'custom_configurations_path': custom_configurations_path,
'toolbar': toolbar_set,
'file_upload_script': file_upload_script,
'CFG_SITE_URL': cfg['CFG_SITE_URL'],
'ln': ln}
else:
        # CKeditor is not installed
textarea = '<textarea rows="100" cols="80" %(id)s name="%(name)s" style="width:%(width)s;height:%(height)s">%(content)s</textarea>' \
% {'content': cgi.escape(textual_content),
'width': width,
'height': height,
'name': name,
'id': id and ('id="%s"' % id) or ''}
editor += textarea
editor += '<input type="hidden" name="editor_type" value="textarea" />'
return editor
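# Illustrative call (parameter values are examples only): render a rich-text
# editor for a "comment" field, falling back to a plain textarea when CKEditor
# is not installed or disabled.
#
#   editor_html = get_html_text_editor('comment', id='comment-box',
#                                      content='<p>Hello</p>',
#                                      width='100%', height='300px',
#                                      toolbar_set='Basic')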
def remove_html_markup(text, replacechar=' ', remove_escaped_chars_p=True):
"""
Remove HTML markup from text.
@param text: Input text.
@type text: string.
@param replacechar: By which character should we replace HTML markup.
Usually, a single space or an empty string are nice values.
@type replacechar: string
@param remove_escaped_chars_p: If True, also remove escaped characters
        like '&amp;', '&lt;', '&gt;' and '&quot;'.
@type remove_escaped_chars_p: boolean
@return: Input text with HTML markup removed.
@rtype: string
"""
if not remove_escaped_chars_p:
return RE_HTML_WITHOUT_ESCAPED_CHARS.sub(replacechar, text)
return RE_HTML.sub(replacechar, text)
def unescape(s, quote=False):
"""
The opposite of the cgi.escape function.
    Replace escaped characters '&amp;', '&lt;' and '&gt;' with the corresponding
    regular characters. If the optional flag quote is true, the escaped quotation
    mark character ('&quot;') is also translated.
    """
    s = s.replace('&lt;', '<')
    s = s.replace('&gt;', '>')
    if quote:
        s = s.replace('&quot;', '"')
    s = s.replace('&amp;', '&')
return s
class EscapedString(str):
"""
This class is a stub used by the MLClass machinery in order
to distinguish native string, from string that don't need to be
escaped.
"""
pass
class EscapedHTMLString(EscapedString):
"""
This class automatically escape a non-escaped string used to initialize
it, using the HTML escaping method (i.e. cgi.escape).
"""
def __new__(cls, original_string='', escape_quotes=False):
if isinstance(original_string, EscapedString):
escaped_string = str(original_string)
else:
if original_string and not str(original_string).strip():
escaped_string = ' '
else:
escaped_string = cgi.escape(str(original_string), escape_quotes)
obj = str.__new__(cls, escaped_string)
obj.original_string = original_string
obj.escape_quotes = escape_quotes
return obj
def __repr__(self):
return 'EscapedHTMLString(%s, %s)' % (repr(self.original_string), repr(self.escape_quotes))
def __add__(self, rhs):
return EscapedHTMLString(EscapedString(str(self) + str(rhs)))
class EscapedXMLString(EscapedString):
"""
This class automatically escape a non-escaped string used to initialize
it, using the XML escaping method (i.e. encode_for_xml).
"""
def __new__(cls, original_string='', escape_quotes=False):
if isinstance(original_string, EscapedString):
escaped_string = str(original_string)
else:
if original_string and not str(original_string).strip():
escaped_string = ' '
else:
escaped_string = encode_for_xml(str(original_string), wash=True, quote=escape_quotes)
obj = str.__new__(cls, escaped_string)
obj.original_string = original_string
obj.escape_quotes = escape_quotes
return obj
def __repr__(self):
return 'EscapedXMLString(%s, %s)' % (repr(self.original_string), repr(self.escape_quotes))
def __add__(self, rhs):
return EscapedXMLString(EscapedString(str(self) + str(rhs)))
def create_tag(tag, escaper=EscapedHTMLString, opening_only=False, body=None, escape_body=False, escape_attr=True, indent=0, attrs=None, **other_attrs):
"""
Create an XML/HTML tag.
    This function creates a full XML/HTML tag, putting together an
optional inner body and a dictionary of attributes.
    >>> print create_tag("select", create_tag("h1",
... "hello", other_attrs={'class': "foo"}))
<select>
<h1 class="foo">
hello
</h1>
</select>
@param tag: the tag (e.g. "select", "body", "h1"...).
@type tag: string
@param body: some text/HTML to put in the body of the tag (this
body will be indented WRT the tag).
@type body: string
    @param escape_body: whether the body (if any) must be escaped.
    @type escape_body: boolean
    @param escape_attr: whether the attribute values (if any) must be
        escaped.
@type escape_attr: boolean
@param indent: number of level of indentation for the tag.
@type indent: integer
@param attrs: map of attributes to add to the tag.
@type attrs: dict
@return: the HTML tag.
@rtype: string
"""
if attrs is None:
attrs = {}
for key, value in iteritems(other_attrs):
if value is not None:
if key.endswith('_'):
attrs[key[:-1]] = value
else:
attrs[key] = value
out = "<%s" % tag
for key, value in iteritems(attrs):
if escape_attr:
value = escaper(value, escape_quotes=True)
out += ' %s="%s"' % (key, value)
if body is not None:
if callable(body) and body.__name__ == 'handle_body':
body = body()
out += ">"
if escape_body and not isinstance(body, EscapedString):
body = escaper(body)
out += body
if not opening_only:
out += "</%s>" % tag
elif not opening_only:
out += " />"
if indent:
out = indent_text(out, indent)[:-1]
from invenio.utils.text import wash_for_utf8
return EscapedString(wash_for_utf8(out))
class MLClass(object):
"""
Swiss army knife to generate XML or HTML strings a la carte.
>>> from invenio.utils.html import X, H
>>> X.foo()()
... '<foo />'
>>> X.foo(bar='baz')()
... '<foo bar="baz" />'
>>> X.foo(bar='baz&pi')()
... '<foo bar="baz&pi" />'
>>> X.foo("<body />", bar='baz')
... '<foo bar="baz"><body /></foo>'
>>> X.foo(bar='baz')(X.body())
... '<foo bar="baz"><body /></foo>'
>>> X.foo(bar='baz')("<body />") ## automatic escaping
... '<foo bar="baz"><body /></foo>'
>>> X.foo()(X.p(), X.p()) ## magic concatenation
... '<foo><p /><p /></foo>'
>>> X.foo(class_='bar')() ## protected keywords...
... '<foo class="bar" />'
>>> X["xml-bar"]()()
... '<xml-bar />'
"""
def __init__(self, escaper):
self.escaper = escaper
def __getattr__(self, tag):
def tag_creator(body=None, opening_only=False, escape_body=False, escape_attr=True, indent=0, attrs=None, **other_attrs):
if body:
return create_tag(tag, body=body, opening_only=opening_only, escape_body=escape_body, escape_attr=escape_attr, indent=indent, attrs=attrs, **other_attrs)
else:
def handle_body(*other_bodies):
full_body = None
if other_bodies:
full_body = ""
for body in other_bodies:
if callable(body) and body.__name__ == 'handle_body':
full_body += body()
elif isinstance(body, EscapedString):
full_body += body
else:
full_body += self.escaper(str(body))
return create_tag(tag, body=full_body, opening_only=opening_only, escape_body=escape_body, escape_attr=escape_attr, indent=indent, attrs=attrs, **other_attrs)
return handle_body
return tag_creator
__getitem__ = __getattr__
H = MLClass(EscapedHTMLString)
X = MLClass(EscapedXMLString)
def create_html_select(options, name=None, selected=None, disabled=None, multiple=False, attrs=None, **other_attrs):
"""
Create an HTML select box.
>>> print create_html_select(["foo", "bar"], selected="bar", name="baz")
<select name="baz">
<option selected="selected" value="bar">
bar
</option>
<option value="foo">
foo
</option>
</select>
>>> print create_html_select([("foo", "oof"), ("bar", "rab")], selected="bar", name="baz")
<select name="baz">
<option value="foo">
oof
</option>
<option selected="selected" value="bar">
rab
</option>
</select>
@param options: this can either be a sequence of strings, or a sequence
of couples or a map of C{key->value}. In the former case, the C{select}
tag will contain a list of C{option} tags (in alphabetical order),
where the C{value} attribute is not specified. In the latter case,
the C{value} attribute will be set to the C{key}, while the body
of the C{option} will be set to C{value}.
@type options: sequence or map
@param name: the name of the form element.
@type name: string
@param selected: optional key(s)/value(s) to select by default. In case
a map has been used for options.
@type selected: string (or list of string)
@param disabled: optional key(s)/value(s) to disable.
@type disabled: string (or list of string)
@param multiple: whether a multiple select box must be created.
    @type multiple: bool
@param attrs: optional attributes to create the select tag.
@type attrs: dict
@param other_attrs: other optional attributes.
@return: the HTML output.
@rtype: string
@note: the values and keys will be escaped for HTML.
@note: it is important that parameter C{value} is always
        specified, in case some browser plugin plays with the
markup, for eg. when translating the page.
"""
body = []
if selected is None:
selected = []
elif isinstance(selected, (str, unicode)):
selected = [selected]
if disabled is None:
disabled = []
elif isinstance(disabled, (str, unicode)):
disabled = [disabled]
if name is not None and multiple and not name.endswith('[]'):
name += "[]"
if isinstance(options, dict):
items = options.items()
items.sort(lambda item1, item2: cmp(item1[1], item2[1]))
elif isinstance(options, (list, tuple)):
options = list(options)
items = []
for item in options:
if isinstance(item, (str, unicode)):
items.append((item, item))
elif isinstance(item, (tuple, list)) and len(item) == 2:
items.append(tuple(item))
else:
raise ValueError('Item "%s" of incompatible type: %s' % (item, type(item)))
else:
raise ValueError('Options of incompatible type: %s' % type(options))
for key, value in items:
option_attrs = {}
if key in selected:
option_attrs['selected'] = 'selected'
if key in disabled:
option_attrs['disabled'] = 'disabled'
body.append(create_tag("option", body=value, escape_body=True, value=key, attrs=option_attrs))
if attrs is None:
attrs = {}
if name is not None:
attrs['name'] = name
if multiple:
attrs['multiple'] = 'multiple'
return create_tag("select", body='\n'.join(body), attrs=attrs, **other_attrs)
class _LinkGetter(HTMLParser):
"""
Hidden class that, by deriving from HTMLParser, will intercept all
<a> tags and retrieve the corresponding href attribute.
All URLs are available in the urls attribute of the class.
"""
def __init__(self):
HTMLParser.__init__(self)
self.urls = set()
def handle_starttag(self, tag, attrs):
if tag == 'a':
for (name, value) in attrs:
if name == 'href':
self.urls.add(value)
def get_links_in_html_page(html):
"""
@param html: the HTML text to parse
@type html: str
@return: the list of URLs that were referenced via <a> tags.
@rtype: set of str
"""
parser = _LinkGetter()
parser.feed(html)
return parser.urls
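# For example, get_links_in_html_page('<a href="/a">x</a> <a href="/b">y</a>')
# returns {'/a', '/b'}.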
| PXke/invenio | invenio/utils/html.py | Python | gpl-2.0 | 36,826 |
""" Python test discovery, setup and run of test functions. """
import enum
import fnmatch
import inspect
import itertools
import os
import sys
import typing
import warnings
from collections import Counter
from collections import defaultdict
from collections.abc import Sequence
from functools import partial
from typing import Callable
from typing import Dict
from typing import Iterable
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
import py
import _pytest
from _pytest import fixtures
from _pytest import nodes
from _pytest._code import filter_traceback
from _pytest._code.code import ExceptionInfo
from _pytest._code.source import getfslineno
from _pytest._io import TerminalWriter
from _pytest._io.saferepr import saferepr
from _pytest.compat import ascii_escaped
from _pytest.compat import get_default_arg_names
from _pytest.compat import get_real_func
from _pytest.compat import getimfunc
from _pytest.compat import getlocation
from _pytest.compat import is_generator
from _pytest.compat import iscoroutinefunction
from _pytest.compat import NOTSET
from _pytest.compat import REGEX_TYPE
from _pytest.compat import safe_getattr
from _pytest.compat import safe_isclass
from _pytest.compat import STRING_TYPES
from _pytest.config import Config
from _pytest.config import hookimpl
from _pytest.deprecated import FUNCARGNAMES
from _pytest.fixtures import FuncFixtureInfo
from _pytest.mark import MARK_GEN
from _pytest.mark import ParameterSet
from _pytest.mark.structures import get_unpacked_marks
from _pytest.mark.structures import Mark
from _pytest.mark.structures import normalize_mark_list
from _pytest.outcomes import fail
from _pytest.outcomes import skip
from _pytest.pathlib import parts
from _pytest.warning_types import PytestCollectionWarning
from _pytest.warning_types import PytestUnhandledCoroutineWarning
def pyobj_property(name):
def get(self):
node = self.getparent(getattr(__import__("pytest"), name))
if node is not None:
return node.obj
doc = "python {} object this node was collected from (can be None).".format(
name.lower()
)
return property(get, None, None, doc)
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption(
"--fixtures",
"--funcargs",
action="store_true",
dest="showfixtures",
default=False,
help="show available fixtures, sorted by plugin appearance "
"(fixtures with leading '_' are only shown with '-v')",
)
group.addoption(
"--fixtures-per-test",
action="store_true",
dest="show_fixtures_per_test",
default=False,
help="show fixtures per test",
)
parser.addini(
"python_files",
type="args",
# NOTE: default is also used in AssertionRewritingHook.
default=["test_*.py", "*_test.py"],
help="glob-style file patterns for Python test module discovery",
)
parser.addini(
"python_classes",
type="args",
default=["Test"],
help="prefixes or glob names for Python test class discovery",
)
parser.addini(
"python_functions",
type="args",
default=["test"],
help="prefixes or glob names for Python test function and method discovery",
)
parser.addini(
"disable_test_id_escaping_and_forfeit_all_rights_to_community_support",
type="bool",
default=False,
help="disable string escape non-ascii characters, might cause unwanted "
"side effects(use at your own risk)",
)
group.addoption(
"--import-mode",
default="prepend",
choices=["prepend", "append"],
dest="importmode",
help="prepend/append to sys.path when importing test modules, "
"default is to prepend.",
)
def pytest_cmdline_main(config):
if config.option.showfixtures:
showfixtures(config)
return 0
if config.option.show_fixtures_per_test:
show_fixtures_per_test(config)
return 0
def pytest_generate_tests(metafunc: "Metafunc") -> None:
for marker in metafunc.definition.iter_markers(name="parametrize"):
# TODO: Fix this type-ignore (overlapping kwargs).
metafunc.parametrize(*marker.args, **marker.kwargs, _param_mark=marker) # type: ignore[misc] # noqa: F821
def pytest_configure(config):
config.addinivalue_line(
"markers",
"parametrize(argnames, argvalues): call a test function multiple "
"times passing in different arguments in turn. argvalues generally "
"needs to be a list of values if argnames specifies only one name "
"or a list of tuples of values if argnames specifies multiple names. "
"Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
"decorated test function, one with arg1=1 and another with arg1=2."
"see https://docs.pytest.org/en/latest/parametrize.html for more info "
"and examples.",
)
config.addinivalue_line(
"markers",
"usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
"all of the specified fixtures. see "
"https://docs.pytest.org/en/latest/fixture.html#usefixtures ",
)
def async_warn(nodeid: str) -> None:
msg = "async def functions are not natively supported and have been skipped.\n"
msg += (
"You need to install a suitable plugin for your async framework, for example:\n"
)
msg += " - pytest-asyncio\n"
msg += " - pytest-trio\n"
msg += " - pytest-tornasync\n"
msg += " - pytest-twisted"
warnings.warn(PytestUnhandledCoroutineWarning(msg.format(nodeid)))
skip(msg="async def function and no async plugin installed (see warnings)")
@hookimpl(trylast=True)
def pytest_pyfunc_call(pyfuncitem: "Function"):
testfunction = pyfuncitem.obj
try:
        # ignoring type as the import is invalid in py37 and mypy thinks it's an error
from unittest import IsolatedAsyncioTestCase # type: ignore
except ImportError:
async_ok_in_stdlib = False
else:
async_ok_in_stdlib = isinstance(
getattr(testfunction, "__self__", None), IsolatedAsyncioTestCase
)
if (
iscoroutinefunction(testfunction)
or (sys.version_info >= (3, 6) and inspect.isasyncgenfunction(testfunction))
) and not async_ok_in_stdlib:
async_warn(pyfuncitem.nodeid)
funcargs = pyfuncitem.funcargs
testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
result = testfunction(**testargs)
if hasattr(result, "__await__") or hasattr(result, "__aiter__"):
if async_ok_in_stdlib:
# todo: investigate moving this to the unittest plugin
# by a test call result hook
testcase = testfunction.__self__
testcase._callMaybeAsync(lambda: result)
else:
async_warn(pyfuncitem.nodeid)
return True
def pytest_collect_file(path, parent):
ext = path.ext
if ext == ".py":
if not parent.session.isinitpath(path):
if not path_matches_patterns(
path, parent.config.getini("python_files") + ["__init__.py"]
):
return
ihook = parent.session.gethookproxy(path)
return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
def path_matches_patterns(path, patterns):
"""Returns True if the given py.path.local matches one of the patterns in the list of globs given"""
return any(path.fnmatch(pattern) for pattern in patterns)
def pytest_pycollect_makemodule(path, parent):
if path.basename == "__init__.py":
return Package.from_parent(parent, fspath=path)
return Module.from_parent(parent, fspath=path)
@hookimpl(hookwrapper=True)
def pytest_pycollect_makeitem(collector, name, obj):
outcome = yield
res = outcome.get_result()
if res is not None:
return
# nothing was collected elsewhere, let's do it here
if safe_isclass(obj):
if collector.istestclass(obj, name):
outcome.force_result(Class.from_parent(collector, name=name, obj=obj))
elif collector.istestfunction(obj, name):
# mock seems to store unbound methods (issue473), normalize it
obj = getattr(obj, "__func__", obj)
# We need to try and unwrap the function if it's a functools.partial
# or a functools.wrapped.
# We mustn't if it's been wrapped with mock.patch (python 2 only)
if not (inspect.isfunction(obj) or inspect.isfunction(get_real_func(obj))):
filename, lineno = getfslineno(obj)
warnings.warn_explicit(
message=PytestCollectionWarning(
"cannot collect %r because it is not a function." % name
),
category=None,
filename=str(filename),
lineno=lineno + 1,
)
elif getattr(obj, "__test__", True):
if is_generator(obj):
res = Function.from_parent(collector, name=name)
reason = "yield tests were removed in pytest 4.0 - {name} will be ignored".format(
name=name
)
res.add_marker(MARK_GEN.xfail(run=False, reason=reason))
res.warn(PytestCollectionWarning(reason))
else:
res = list(collector._genfunctions(name, obj))
outcome.force_result(res)
class PyobjMixin:
module = pyobj_property("Module")
cls = pyobj_property("Class")
instance = pyobj_property("Instance")
_ALLOW_MARKERS = True
@property
def obj(self):
"""Underlying Python object."""
obj = getattr(self, "_obj", None)
if obj is None:
self._obj = obj = self._getobj()
# XXX evil hack
# used to avoid Instance collector marker duplication
if self._ALLOW_MARKERS:
self.own_markers.extend(get_unpacked_marks(self.obj))
return obj
@obj.setter
def obj(self, value):
self._obj = value
def _getobj(self):
"""Gets the underlying Python object. May be overwritten by subclasses."""
return getattr(self.parent.obj, self.name)
def getmodpath(self, stopatmodule=True, includemodule=False):
""" return python path relative to the containing module. """
chain = self.listchain()
chain.reverse()
parts = []
for node in chain:
if isinstance(node, Instance):
continue
name = node.name
if isinstance(node, Module):
name = os.path.splitext(name)[0]
if stopatmodule:
if includemodule:
parts.append(name)
break
parts.append(name)
parts.reverse()
return ".".join(parts)
def reportinfo(self) -> Tuple[Union[py.path.local, str], int, str]:
# XXX caching?
obj = self.obj
compat_co_firstlineno = getattr(obj, "compat_co_firstlineno", None)
if isinstance(compat_co_firstlineno, int):
# nose compatibility
file_path = sys.modules[obj.__module__].__file__
if file_path.endswith(".pyc"):
file_path = file_path[:-1]
fspath = file_path # type: Union[py.path.local, str]
lineno = compat_co_firstlineno
else:
fspath, lineno = getfslineno(obj)
modpath = self.getmodpath()
assert isinstance(lineno, int)
return fspath, lineno, modpath
class PyCollector(PyobjMixin, nodes.Collector):
def funcnamefilter(self, name):
return self._matches_prefix_or_glob_option("python_functions", name)
def isnosetest(self, obj):
""" Look for the __test__ attribute, which is applied by the
@nose.tools.istest decorator
"""
# We explicitly check for "is True" here to not mistakenly treat
# classes with a custom __getattr__ returning something truthy (like a
# function) as test classes.
return safe_getattr(obj, "__test__", False) is True
def classnamefilter(self, name):
return self._matches_prefix_or_glob_option("python_classes", name)
def istestfunction(self, obj, name):
if self.funcnamefilter(name) or self.isnosetest(obj):
if isinstance(obj, staticmethod):
# static methods need to be unwrapped
obj = safe_getattr(obj, "__func__", False)
return (
safe_getattr(obj, "__call__", False)
and fixtures.getfixturemarker(obj) is None
)
else:
return False
def istestclass(self, obj, name):
return self.classnamefilter(name) or self.isnosetest(obj)
def _matches_prefix_or_glob_option(self, option_name, name):
"""
checks if the given name matches the prefix or glob-pattern defined
in ini configuration.
"""
for option in self.config.getini(option_name):
if name.startswith(option):
return True
# check that name looks like a glob-string before calling fnmatch
# because this is called for every name in each collected module,
# and fnmatch is somewhat expensive to call
elif ("*" in option or "?" in option or "[" in option) and fnmatch.fnmatch(
name, option
):
return True
return False
def collect(self):
if not getattr(self.obj, "__test__", True):
return []
# NB. we avoid random getattrs and peek in the __dict__ instead
# (XXX originally introduced from a PyPy need, still true?)
dicts = [getattr(self.obj, "__dict__", {})]
for basecls in inspect.getmro(self.obj.__class__):
dicts.append(basecls.__dict__)
seen = {}
values = []
for dic in dicts:
for name, obj in list(dic.items()):
if name in seen:
continue
seen[name] = True
res = self._makeitem(name, obj)
if res is None:
continue
if not isinstance(res, list):
res = [res]
values.extend(res)
def sort_key(item):
fspath, lineno, _ = item.reportinfo()
return (str(fspath), lineno)
values.sort(key=sort_key)
return values
def _makeitem(self, name, obj):
# assert self.ihook.fspath == self.fspath, self
return self.ihook.pytest_pycollect_makeitem(collector=self, name=name, obj=obj)
def _genfunctions(self, name, funcobj):
module = self.getparent(Module).obj
clscol = self.getparent(Class)
cls = clscol and clscol.obj or None
fm = self.session._fixturemanager
definition = FunctionDefinition.from_parent(self, name=name, callobj=funcobj)
fixtureinfo = definition._fixtureinfo
metafunc = Metafunc(
definition, fixtureinfo, self.config, cls=cls, module=module
)
methods = []
if hasattr(module, "pytest_generate_tests"):
methods.append(module.pytest_generate_tests)
if hasattr(cls, "pytest_generate_tests"):
methods.append(cls().pytest_generate_tests)
self.ihook.pytest_generate_tests.call_extra(methods, dict(metafunc=metafunc))
if not metafunc._calls:
yield Function.from_parent(self, name=name, fixtureinfo=fixtureinfo)
else:
# add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
# add_funcarg_pseudo_fixture_def may have shadowed some fixtures
# with direct parametrization, so make sure we update what the
# function really needs.
fixtureinfo.prune_dependency_tree()
for callspec in metafunc._calls:
subname = "{}[{}]".format(name, callspec.id)
yield Function.from_parent(
self,
name=subname,
callspec=callspec,
callobj=funcobj,
fixtureinfo=fixtureinfo,
keywords={callspec.id: True},
originalname=name,
)
class Module(nodes.File, PyCollector):
""" Collector for test classes and functions. """
def _getobj(self):
return self._importtestmodule()
def collect(self):
self._inject_setup_module_fixture()
self._inject_setup_function_fixture()
self.session._fixturemanager.parsefactories(self)
return super().collect()
def _inject_setup_module_fixture(self):
"""Injects a hidden autouse, module scoped fixture into the collected module object
that invokes setUpModule/tearDownModule if either or both are available.
        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
other fixtures (#517).
"""
setup_module = _get_first_non_fixture_func(
self.obj, ("setUpModule", "setup_module")
)
teardown_module = _get_first_non_fixture_func(
self.obj, ("tearDownModule", "teardown_module")
)
if setup_module is None and teardown_module is None:
return
@fixtures.fixture(autouse=True, scope="module")
def xunit_setup_module_fixture(request):
if setup_module is not None:
_call_with_optional_argument(setup_module, request.module)
yield
if teardown_module is not None:
_call_with_optional_argument(teardown_module, request.module)
self.obj.__pytest_setup_module = xunit_setup_module_fixture
def _inject_setup_function_fixture(self):
"""Injects a hidden autouse, function scoped fixture into the collected module object
that invokes setup_function/teardown_function if either or both are available.
        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
other fixtures (#517).
"""
setup_function = _get_first_non_fixture_func(self.obj, ("setup_function",))
teardown_function = _get_first_non_fixture_func(
self.obj, ("teardown_function",)
)
if setup_function is None and teardown_function is None:
return
@fixtures.fixture(autouse=True, scope="function")
def xunit_setup_function_fixture(request):
if request.instance is not None:
# in this case we are bound to an instance, so we need to let
# setup_method handle this
yield
return
if setup_function is not None:
_call_with_optional_argument(setup_function, request.function)
yield
if teardown_function is not None:
_call_with_optional_argument(teardown_function, request.function)
self.obj.__pytest_setup_function = xunit_setup_function_fixture
def _importtestmodule(self):
# we assume we are only called once per module
importmode = self.config.getoption("--import-mode")
try:
mod = self.fspath.pyimport(ensuresyspath=importmode)
except SyntaxError:
raise self.CollectError(ExceptionInfo.from_current().getrepr(style="short"))
except self.fspath.ImportMismatchError as e:
raise self.CollectError(
"import file mismatch:\n"
"imported module %r has this __file__ attribute:\n"
" %s\n"
"which is not the same as the test file we want to collect:\n"
" %s\n"
"HINT: remove __pycache__ / .pyc files and/or use a "
"unique basename for your test file modules" % e.args
)
except ImportError:
exc_info = ExceptionInfo.from_current()
if self.config.getoption("verbose") < 2:
exc_info.traceback = exc_info.traceback.filter(filter_traceback)
exc_repr = (
exc_info.getrepr(style="short")
if exc_info.traceback
else exc_info.exconly()
)
formatted_tb = str(exc_repr)
raise self.CollectError(
"ImportError while importing test module '{fspath}'.\n"
"Hint: make sure your test modules/packages have valid Python names.\n"
"Traceback:\n"
"{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
)
except _pytest.runner.Skipped as e:
if e.allow_module_level:
raise
raise self.CollectError(
"Using pytest.skip outside of a test is not allowed. "
"To decorate a test function, use the @pytest.mark.skip "
"or @pytest.mark.skipif decorators instead, and to skip a "
"module use `pytestmark = pytest.mark.{skip,skipif}."
)
self.config.pluginmanager.consider_module(mod)
return mod
class Package(Module):
def __init__(
self,
fspath: py.path.local,
parent: nodes.Collector,
# NOTE: following args are unused:
config=None,
session=None,
nodeid=None,
) -> None:
# NOTE: could be just the following, but kept as-is for compat.
# nodes.FSCollector.__init__(self, fspath, parent=parent)
session = parent.session
nodes.FSCollector.__init__(
self, fspath, parent=parent, config=config, session=session, nodeid=nodeid
)
self.name = fspath.dirname
def setup(self):
# not using fixtures to call setup_module here because autouse fixtures
# from packages are not called automatically (#4085)
setup_module = _get_first_non_fixture_func(
self.obj, ("setUpModule", "setup_module")
)
if setup_module is not None:
_call_with_optional_argument(setup_module, self.obj)
teardown_module = _get_first_non_fixture_func(
self.obj, ("tearDownModule", "teardown_module")
)
if teardown_module is not None:
func = partial(_call_with_optional_argument, teardown_module, self.obj)
self.addfinalizer(func)
def gethookproxy(self, fspath: py.path.local):
return super()._gethookproxy(fspath)
def isinitpath(self, path):
return path in self.session._initialpaths
def collect(self):
this_path = self.fspath.dirpath()
init_module = this_path.join("__init__.py")
if init_module.check(file=1) and path_matches_patterns(
init_module, self.config.getini("python_files")
):
yield Module.from_parent(self, fspath=init_module)
pkg_prefixes = set()
for path in this_path.visit(rec=self._recurse, bf=True, sort=True):
# We will visit our own __init__.py file, in which case we skip it.
is_file = path.isfile()
if is_file:
if path.basename == "__init__.py" and path.dirpath() == this_path:
continue
parts_ = parts(path.strpath)
if any(
pkg_prefix in parts_ and pkg_prefix.join("__init__.py") != path
for pkg_prefix in pkg_prefixes
):
continue
if is_file:
yield from self._collectfile(path)
elif not path.isdir():
# Broken symlink or invalid/missing file.
continue
elif path.join("__init__.py").check(file=1):
pkg_prefixes.add(path)
def _call_with_optional_argument(func, arg):
"""Call the given function with the given argument if func accepts one argument, otherwise
calls func without arguments"""
arg_count = func.__code__.co_argcount
if inspect.ismethod(func):
arg_count -= 1
if arg_count:
func(arg)
else:
func()
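# For example, both xunit-style signatures work transparently with this helper:
#   def setup_module(module): ...   # called as setup_module(the_module_object)
#   def setup_module(): ...         # called with no argument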
def _get_first_non_fixture_func(obj, names):
"""Return the attribute from the given object to be used as a setup/teardown
xunit-style function, but only if not marked as a fixture to
avoid calling it twice.
"""
for name in names:
meth = getattr(obj, name, None)
if meth is not None and fixtures.getfixturemarker(meth) is None:
return meth
class Class(PyCollector):
""" Collector for test methods. """
@classmethod
def from_parent(cls, parent, *, name, obj=None):
"""
The public constructor
"""
return super().from_parent(name=name, parent=parent)
def collect(self):
if not safe_getattr(self.obj, "__test__", True):
return []
if hasinit(self.obj):
self.warn(
PytestCollectionWarning(
"cannot collect test class %r because it has a "
"__init__ constructor (from: %s)"
% (self.obj.__name__, self.parent.nodeid)
)
)
return []
elif hasnew(self.obj):
self.warn(
PytestCollectionWarning(
"cannot collect test class %r because it has a "
"__new__ constructor (from: %s)"
% (self.obj.__name__, self.parent.nodeid)
)
)
return []
self._inject_setup_class_fixture()
self._inject_setup_method_fixture()
return [Instance.from_parent(self, name="()")]
def _inject_setup_class_fixture(self):
"""Injects a hidden autouse, class scoped fixture into the collected class object
that invokes setup_class/teardown_class if either or both are available.
        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
other fixtures (#517).
"""
setup_class = _get_first_non_fixture_func(self.obj, ("setup_class",))
teardown_class = getattr(self.obj, "teardown_class", None)
if setup_class is None and teardown_class is None:
return
@fixtures.fixture(autouse=True, scope="class")
def xunit_setup_class_fixture(cls):
if setup_class is not None:
func = getimfunc(setup_class)
_call_with_optional_argument(func, self.obj)
yield
if teardown_class is not None:
func = getimfunc(teardown_class)
_call_with_optional_argument(func, self.obj)
self.obj.__pytest_setup_class = xunit_setup_class_fixture
def _inject_setup_method_fixture(self):
"""Injects a hidden autouse, function scoped fixture into the collected class object
that invokes setup_method/teardown_method if either or both are available.
        Using a fixture to invoke these methods ensures we play nicely and unsurprisingly with
other fixtures (#517).
"""
setup_method = _get_first_non_fixture_func(self.obj, ("setup_method",))
teardown_method = getattr(self.obj, "teardown_method", None)
if setup_method is None and teardown_method is None:
return
@fixtures.fixture(autouse=True, scope="function")
def xunit_setup_method_fixture(self, request):
method = request.function
if setup_method is not None:
func = getattr(self, "setup_method")
_call_with_optional_argument(func, method)
yield
if teardown_method is not None:
func = getattr(self, "teardown_method")
_call_with_optional_argument(func, method)
self.obj.__pytest_setup_method = xunit_setup_method_fixture
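# Illustrative sketch (not part of pytest): the kind of user-written test class whose
# xunit-style hooks the fixtures injected above will invoke; all names are hypothetical.
#
#     class TestDatabase:
#         @classmethod
#         def setup_class(cls):
#             cls.conn = connect()        # runs once per class, via the class-scoped fixture
#         def setup_method(self, method):
#             self.rows = []              # runs before every test, via the function-scoped fixture
#         def teardown_method(self, method):
#             self.rows.clear()
#         @classmethod
#         def teardown_class(cls):
#             cls.conn.close()
#         def test_insert(self):
#             assert self.rows == []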
class Instance(PyCollector):
_ALLOW_MARKERS = False # hack, destroy later
# instances share the object with their parents in a way
    # that duplicates marker instances if not taken out
# can be removed at node structure reorganization time
def _getobj(self):
return self.parent.obj()
def collect(self):
self.session._fixturemanager.parsefactories(self)
return super().collect()
def newinstance(self):
self.obj = self._getobj()
return self.obj
def hasinit(obj):
init = getattr(obj, "__init__", None)
if init:
return init != object.__init__
def hasnew(obj):
new = getattr(obj, "__new__", None)
if new:
return new != object.__new__
class CallSpec2:
def __init__(self, metafunc):
self.metafunc = metafunc
self.funcargs = {}
self._idlist = []
self.params = {}
self._arg2scopenum = {} # used for sorting parametrized resources
self.marks = []
self.indices = {}
def copy(self):
cs = CallSpec2(self.metafunc)
cs.funcargs.update(self.funcargs)
cs.params.update(self.params)
cs.marks.extend(self.marks)
cs.indices.update(self.indices)
cs._arg2scopenum.update(self._arg2scopenum)
cs._idlist = list(self._idlist)
return cs
def _checkargnotcontained(self, arg):
if arg in self.params or arg in self.funcargs:
raise ValueError("duplicate {!r}".format(arg))
def getparam(self, name):
try:
return self.params[name]
except KeyError:
raise ValueError(name)
@property
def id(self):
return "-".join(map(str, self._idlist))
def setmulti2(self, valtypes, argnames, valset, id, marks, scopenum, param_index):
for arg, val in zip(argnames, valset):
self._checkargnotcontained(arg)
valtype_for_arg = valtypes[arg]
getattr(self, valtype_for_arg)[arg] = val
self.indices[arg] = param_index
self._arg2scopenum[arg] = scopenum
self._idlist.append(id)
self.marks.extend(normalize_mark_list(marks))
class Metafunc:
"""
Metafunc objects are passed to the :func:`pytest_generate_tests <_pytest.hookspec.pytest_generate_tests>` hook.
They help to inspect a test function and to generate tests according to
test configuration or values specified in the class or module where a
test function is defined.
"""
def __init__(
self,
definition: "FunctionDefinition",
fixtureinfo: fixtures.FuncFixtureInfo,
config: Config,
cls=None,
module=None,
) -> None:
self.definition = definition
#: access to the :class:`_pytest.config.Config` object for the test session
self.config = config
#: the module object where the test function is defined in.
self.module = module
#: underlying python test function
self.function = definition.obj
#: set of fixture names required by the test function
self.fixturenames = fixtureinfo.names_closure
#: class object where the test function is defined in or ``None``.
self.cls = cls
self._calls = [] # type: List[CallSpec2]
self._arg2fixturedefs = fixtureinfo.name2fixturedefs
@property
def funcargnames(self):
""" alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
warnings.warn(FUNCARGNAMES, stacklevel=2)
return self.fixturenames
def parametrize(
self,
argnames: Union[str, List[str], Tuple[str, ...]],
argvalues: Iterable[Union[ParameterSet, typing.Sequence[object], object]],
indirect: Union[bool, typing.Sequence[str]] = False,
ids: Optional[
Union[
Iterable[Union[None, str, float, int, bool]],
Callable[[object], Optional[object]],
]
] = None,
scope: "Optional[str]" = None,
*,
_param_mark: Optional[Mark] = None
) -> None:
""" Add new invocations to the underlying test function using the list
of argvalues for the given argnames. Parametrization is performed
        during the collection phase. If you need to set up expensive resources,
        consider setting ``indirect`` so the setup happens at test setup time instead.
:arg argnames: a comma-separated string denoting one or more argument
names, or a list/tuple of argument strings.
:arg argvalues: The list of argvalues determines how often a
test is invoked with different argument values. If only one
argname was specified argvalues is a list of values. If N
argnames were specified, argvalues must be a list of N-tuples,
where each tuple-element specifies a value for its respective
argname.
:arg indirect: The list of argnames or boolean. A list of arguments'
names (subset of argnames). If True the list contains all names from
the argnames. Each argvalue corresponding to an argname in this list will
be passed as request.param to its respective argname fixture
function so that it can perform more expensive setups during the
setup phase of a test rather than at collection time.
:arg ids: sequence of (or generator for) ids for ``argvalues``,
or a callable to return part of the id for each argvalue.
With sequences (and generators like ``itertools.count()``) the
returned ids should be of type ``string``, ``int``, ``float``,
``bool``, or ``None``.
They are mapped to the corresponding index in ``argvalues``.
``None`` means to use the auto-generated id.
If it is a callable it will be called for each entry in
``argvalues``, and the return value is used as part of the
auto-generated id for the whole set (where parts are joined with
dashes ("-")).
This is useful to provide more specific ids for certain items, e.g.
dates. Returning ``None`` will use an auto-generated id.
If no ids are provided they will be generated automatically from
the argvalues.
:arg scope: if specified it denotes the scope of the parameters.
The scope is used for grouping tests by parameter instances.
It will also override any fixture-function defined scope, allowing
to set a dynamic scope using test context or configuration.
"""
from _pytest.fixtures import scope2index
argnames, parameters = ParameterSet._for_parametrize(
argnames,
argvalues,
self.function,
self.config,
function_definition=self.definition,
)
del argvalues
if "request" in argnames:
fail(
"'request' is a reserved name and cannot be used in @pytest.mark.parametrize",
pytrace=False,
)
if scope is None:
scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
self._validate_if_using_arg_names(argnames, indirect)
arg_values_types = self._resolve_arg_value_types(argnames, indirect)
self._validate_explicit_parameters(argnames, indirect)
# Use any already (possibly) generated ids with parametrize Marks.
if _param_mark and _param_mark._param_ids_from:
generated_ids = _param_mark._param_ids_from._param_ids_generated
if generated_ids is not None:
ids = generated_ids
ids = self._resolve_arg_ids(argnames, ids, parameters, item=self.definition)
# Store used (possibly generated) ids with parametrize Marks.
if _param_mark and _param_mark._param_ids_from and generated_ids is None:
object.__setattr__(_param_mark._param_ids_from, "_param_ids_generated", ids)
scopenum = scope2index(
scope, descr="parametrize() call in {}".format(self.function.__name__)
)
        # create the new calls: if parametrize() is applied multiple times (by applying the
        # decorator more than once) then we accumulate those calls, generating the cartesian
        # product of all calls
newcalls = []
for callspec in self._calls or [CallSpec2(self)]:
for param_index, (param_id, param_set) in enumerate(zip(ids, parameters)):
newcallspec = callspec.copy()
newcallspec.setmulti2(
arg_values_types,
argnames,
param_set.values,
param_id,
param_set.marks,
scopenum,
param_index,
)
newcalls.append(newcallspec)
self._calls = newcalls
def _resolve_arg_ids(
self,
argnames: typing.Sequence[str],
ids: Optional[
Union[
Iterable[Union[None, str, float, int, bool]],
Callable[[object], Optional[object]],
]
],
parameters: typing.Sequence[ParameterSet],
item,
) -> List[str]:
"""Resolves the actual ids for the given argnames, based on the ``ids`` parameter given
to ``parametrize``.
:param List[str] argnames: list of argument names passed to ``parametrize()``.
:param ids: the ids parameter of the parametrized call (see docs).
:param List[ParameterSet] parameters: the list of parameter values, same size as ``argnames``.
:param Item item: the item that generated this parametrized call.
:rtype: List[str]
:return: the list of ids for each argname given
"""
if ids is None:
idfn = None
ids_ = None
elif callable(ids):
idfn = ids
ids_ = None
else:
idfn = None
ids_ = self._validate_ids(ids, parameters, self.function.__name__)
return idmaker(argnames, parameters, idfn, ids_, self.config, item=item)
def _validate_ids(
self,
ids: Iterable[Union[None, str, float, int, bool]],
parameters: typing.Sequence[ParameterSet],
func_name: str,
) -> List[Union[None, str]]:
try:
num_ids = len(ids) # type: ignore[arg-type] # noqa: F821
except TypeError:
try:
iter(ids)
except TypeError:
raise TypeError("ids must be a callable or an iterable")
num_ids = len(parameters)
# num_ids == 0 is a special case: https://github.com/pytest-dev/pytest/issues/1849
if num_ids != len(parameters) and num_ids != 0:
msg = "In {}: {} parameter sets specified, with different number of ids: {}"
fail(msg.format(func_name, len(parameters), num_ids), pytrace=False)
new_ids = []
for idx, id_value in enumerate(itertools.islice(ids, num_ids)):
if id_value is None or isinstance(id_value, str):
new_ids.append(id_value)
elif isinstance(id_value, (float, int, bool)):
new_ids.append(str(id_value))
else:
msg = "In {}: ids must be list of string/float/int/bool, found: {} (type: {!r}) at index {}"
fail(
msg.format(func_name, saferepr(id_value), type(id_value), idx),
pytrace=False,
)
return new_ids
def _resolve_arg_value_types(
self,
argnames: typing.Sequence[str],
indirect: Union[bool, typing.Sequence[str]],
) -> Dict[str, str]:
"""Resolves if each parametrized argument must be considered a parameter to a fixture or a "funcarg"
        to the function, based on the ``indirect`` parameter of the parametrize() call.
:param List[str] argnames: list of argument names passed to ``parametrize()``.
:param indirect: same ``indirect`` parameter of ``parametrize()``.
:rtype: Dict[str, str]
A dict mapping each arg name to either:
* "params" if the argname should be the parameter of a fixture of the same name.
* "funcargs" if the argname should be a parameter to the parametrized test function.
"""
if isinstance(indirect, bool):
valtypes = dict.fromkeys(argnames, "params" if indirect else "funcargs")
elif isinstance(indirect, Sequence):
valtypes = dict.fromkeys(argnames, "funcargs")
for arg in indirect:
if arg not in argnames:
fail(
"In {}: indirect fixture '{}' doesn't exist".format(
self.function.__name__, arg
),
pytrace=False,
)
valtypes[arg] = "params"
else:
fail(
"In {func}: expected Sequence or boolean for indirect, got {type}".format(
type=type(indirect).__name__, func=self.function.__name__
),
pytrace=False,
)
return valtypes
def _validate_if_using_arg_names(
self,
argnames: typing.Sequence[str],
indirect: Union[bool, typing.Sequence[str]],
) -> None:
"""
Check if all argnames are being used, by default values, or directly/indirectly.
:param List[str] argnames: list of argument names passed to ``parametrize()``.
:param indirect: same ``indirect`` parameter of ``parametrize()``.
:raise ValueError: if validation fails.
"""
default_arg_names = set(get_default_arg_names(self.function))
func_name = self.function.__name__
for arg in argnames:
if arg not in self.fixturenames:
if arg in default_arg_names:
fail(
"In {}: function already takes an argument '{}' with a default value".format(
func_name, arg
),
pytrace=False,
)
else:
if isinstance(indirect, Sequence):
name = "fixture" if arg in indirect else "argument"
else:
name = "fixture" if indirect else "argument"
fail(
"In {}: function uses no {} '{}'".format(func_name, name, arg),
pytrace=False,
)
def _validate_explicit_parameters(
self,
argnames: typing.Sequence[str],
indirect: Union[bool, typing.Sequence[str]],
) -> None:
"""
The argnames in *parametrize* should either be declared explicitly via
indirect list or in the function signature
:param List[str] argnames: list of argument names passed to ``parametrize()``.
:param indirect: same ``indirect`` parameter of ``parametrize()``.
:raise ValueError: if validation fails
"""
if isinstance(indirect, bool):
parametrized_argnames = [] if indirect else argnames
else:
parametrized_argnames = [arg for arg in argnames if arg not in indirect]
if not parametrized_argnames:
return
funcargnames = _pytest.compat.getfuncargnames(self.function)
usefixtures = fixtures.get_use_fixtures_for_node(self.definition)
for arg in parametrized_argnames:
if arg not in funcargnames and arg not in usefixtures:
func_name = self.function.__name__
msg = (
'In function "{func_name}":\n'
'Parameter "{arg}" should be declared explicitly via indirect or in function itself'
).format(func_name=func_name, arg=arg)
fail(msg, pytrace=False)
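# Illustrative sketch (not part of pytest): a typical parametrize() call reaching the
# implementation above, written as user code; test and fixture names are hypothetical.
#
#     @pytest.fixture
#     def backend(request):
#         return {"name": request.param}      # receives the value because of indirect=["backend"]
#
#     @pytest.mark.parametrize(
#         "backend,retries",
#         [("sqlite", 1), ("postgres", 3)],
#         indirect=["backend"],
#         ids=["sqlite", "postgres"],
#     )
#     def test_connect(backend, retries):
#         assert retries >= 1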
def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
"""Find the most appropriate scope for a parametrized call based on its arguments.
When there's at least one direct argument, always use "function" scope.
When a test function is parametrized and all its arguments are indirect
(e.g. fixtures), return the most narrow scope based on the fixtures used.
Related to issue #1832, based on code posted by @Kingdread.
"""
from _pytest.fixtures import scopes
if isinstance(indirect, (list, tuple)):
all_arguments_are_fixtures = len(indirect) == len(argnames)
else:
all_arguments_are_fixtures = bool(indirect)
if all_arguments_are_fixtures:
fixturedefs = arg2fixturedefs or {}
used_scopes = [
fixturedef[0].scope
for name, fixturedef in fixturedefs.items()
if name in argnames
]
if used_scopes:
# Takes the most narrow scope from used fixtures
for scope in reversed(scopes):
if scope in used_scopes:
return scope
return "function"
def _ascii_escaped_by_config(val: Union[str, bytes], config: Optional[Config]) -> str:
if config is None:
escape_option = False
else:
escape_option = config.getini(
"disable_test_id_escaping_and_forfeit_all_rights_to_community_support"
)
# TODO: If escaping is turned off and the user passes bytes,
# will return a bytes. For now we ignore this but the
# code *probably* doesn't handle this case.
return val if escape_option else ascii_escaped(val) # type: ignore
def _idval(
val: object,
argname: str,
idx: int,
idfn: Optional[Callable[[object], Optional[object]]],
item,
config: Optional[Config],
) -> str:
if idfn:
try:
generated_id = idfn(val)
if generated_id is not None:
val = generated_id
except Exception as e:
msg = "{}: error raised while trying to determine id of parameter '{}' at position {}"
msg = msg.format(item.nodeid, argname, idx)
raise ValueError(msg) from e
elif config:
hook_id = config.hook.pytest_make_parametrize_id(
config=config, val=val, argname=argname
) # type: Optional[str]
if hook_id:
return hook_id
if isinstance(val, STRING_TYPES):
return _ascii_escaped_by_config(val, config)
elif val is None or isinstance(val, (float, int, bool)):
return str(val)
elif isinstance(val, REGEX_TYPE):
return ascii_escaped(val.pattern)
elif isinstance(val, enum.Enum):
return str(val)
elif isinstance(getattr(val, "__name__", None), str):
# name of a class, function, module, etc.
name = getattr(val, "__name__") # type: str
return name
return str(argname) + str(idx)
def _idvalset(
idx: int,
parameterset: ParameterSet,
argnames: Iterable[str],
idfn: Optional[Callable[[object], Optional[object]]],
ids: Optional[List[Union[None, str]]],
item,
config: Optional[Config],
):
if parameterset.id is not None:
return parameterset.id
id = None if ids is None or idx >= len(ids) else ids[idx]
if id is None:
this_id = [
_idval(val, argname, idx, idfn, item=item, config=config)
for val, argname in zip(parameterset.values, argnames)
]
return "-".join(this_id)
else:
return _ascii_escaped_by_config(id, config)
def idmaker(
argnames: Iterable[str],
parametersets: Iterable[ParameterSet],
idfn: Optional[Callable[[object], Optional[object]]] = None,
ids: Optional[List[Union[None, str]]] = None,
config: Optional[Config] = None,
item=None,
) -> List[str]:
resolved_ids = [
_idvalset(valindex, parameterset, argnames, idfn, ids, config=config, item=item)
for valindex, parameterset in enumerate(parametersets)
]
# All IDs must be unique!
unique_ids = set(resolved_ids)
if len(unique_ids) != len(resolved_ids):
# Record the number of occurrences of each test ID
test_id_counts = Counter(resolved_ids)
# Map the test ID to its next suffix
test_id_suffixes = defaultdict(int) # type: Dict[str, int]
# Suffix non-unique IDs to make them unique
for index, test_id in enumerate(resolved_ids):
if test_id_counts[test_id] > 1:
resolved_ids[index] = "{}{}".format(test_id, test_id_suffixes[test_id])
test_id_suffixes[test_id] += 1
return resolved_ids
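# Illustrative sketch (not part of pytest): the ids produced by idmaker() above. For a
# single argname "x" with values 0 and 1 the auto-generated ids are "0" and "1"; when two
# parameter sets stringify to the same id (e.g. "data"), the Counter/defaultdict logic
# above suffixes them to "data0" and "data1" so every id stays unique.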
def show_fixtures_per_test(config):
from _pytest.main import wrap_session
return wrap_session(config, _show_fixtures_per_test)
def _show_fixtures_per_test(config, session):
import _pytest.config
session.perform_collect()
curdir = py.path.local()
tw = _pytest.config.create_terminal_writer(config)
verbose = config.getvalue("verbose")
def get_best_relpath(func):
loc = getlocation(func, curdir)
return curdir.bestrelpath(loc)
def write_fixture(fixture_def):
argname = fixture_def.argname
if verbose <= 0 and argname.startswith("_"):
return
if verbose > 0:
bestrel = get_best_relpath(fixture_def.func)
funcargspec = "{} -- {}".format(argname, bestrel)
else:
funcargspec = argname
tw.line(funcargspec, green=True)
fixture_doc = inspect.getdoc(fixture_def.func)
if fixture_doc:
write_docstring(tw, fixture_doc)
else:
tw.line(" no docstring available", red=True)
def write_item(item):
try:
info = item._fixtureinfo
except AttributeError:
# doctests items have no _fixtureinfo attribute
return
if not info.name2fixturedefs:
# this test item does not use any fixtures
return
tw.line()
tw.sep("-", "fixtures used by {}".format(item.name))
tw.sep("-", "({})".format(get_best_relpath(item.function)))
# dict key not used in loop but needed for sorting
for _, fixturedefs in sorted(info.name2fixturedefs.items()):
assert fixturedefs is not None
if not fixturedefs:
continue
# last item is expected to be the one used by the test item
write_fixture(fixturedefs[-1])
for session_item in session.items:
write_item(session_item)
def showfixtures(config):
from _pytest.main import wrap_session
return wrap_session(config, _showfixtures_main)
def _showfixtures_main(config, session):
import _pytest.config
session.perform_collect()
curdir = py.path.local()
tw = _pytest.config.create_terminal_writer(config)
verbose = config.getvalue("verbose")
fm = session._fixturemanager
available = []
seen = set()
for argname, fixturedefs in fm._arg2fixturedefs.items():
assert fixturedefs is not None
if not fixturedefs:
continue
for fixturedef in fixturedefs:
loc = getlocation(fixturedef.func, curdir)
if (fixturedef.argname, loc) in seen:
continue
seen.add((fixturedef.argname, loc))
available.append(
(
len(fixturedef.baseid),
fixturedef.func.__module__,
curdir.bestrelpath(loc),
fixturedef.argname,
fixturedef,
)
)
available.sort()
currentmodule = None
for baseid, module, bestrel, argname, fixturedef in available:
if currentmodule != module:
if not module.startswith("_pytest."):
tw.line()
tw.sep("-", "fixtures defined from {}".format(module))
currentmodule = module
if verbose <= 0 and argname[0] == "_":
continue
tw.write(argname, green=True)
if fixturedef.scope != "function":
tw.write(" [%s scope]" % fixturedef.scope, cyan=True)
if verbose > 0:
tw.write(" -- %s" % bestrel, yellow=True)
tw.write("\n")
loc = getlocation(fixturedef.func, curdir)
doc = inspect.getdoc(fixturedef.func)
if doc:
write_docstring(tw, doc)
else:
tw.line(" {}: no docstring available".format(loc), red=True)
tw.line()
def write_docstring(tw: TerminalWriter, doc: str, indent: str = " ") -> None:
for line in doc.split("\n"):
tw.write(indent + line + "\n")
class Function(PyobjMixin, nodes.Item):
""" a Function Item is responsible for setting up and executing a
Python test function.
"""
# disable since functions handle it themselves
_ALLOW_MARKERS = False
def __init__(
self,
name,
parent,
args=None,
config=None,
callspec: Optional[CallSpec2] = None,
callobj=NOTSET,
keywords=None,
session=None,
fixtureinfo: Optional[FuncFixtureInfo] = None,
originalname=None,
) -> None:
super().__init__(name, parent, config=config, session=session)
self._args = args
if callobj is not NOTSET:
self.obj = callobj
self.keywords.update(self.obj.__dict__)
self.own_markers.extend(get_unpacked_marks(self.obj))
if callspec:
self.callspec = callspec
            # this is totally hostile and a mess
            # keywords are broken by design by now
            # this will be redeemed later
for mark in callspec.marks:
                # feel free to cry, this was broken for years before
                # and keywords can't fix it per design
self.keywords[mark.name] = mark
self.own_markers.extend(normalize_mark_list(callspec.marks))
if keywords:
self.keywords.update(keywords)
# todo: this is a hell of a hack
# https://github.com/pytest-dev/pytest/issues/4569
self.keywords.update(
{
mark.name: True
for mark in self.iter_markers()
if mark.name not in self.keywords
}
)
if fixtureinfo is None:
fixtureinfo = self.session._fixturemanager.getfixtureinfo(
self, self.obj, self.cls, funcargs=True
)
self._fixtureinfo = fixtureinfo # type: FuncFixtureInfo
self.fixturenames = fixtureinfo.names_closure
self._initrequest()
#: original function name, without any decorations (for example
#: parametrization adds a ``"[...]"`` suffix to function names).
#:
#: .. versionadded:: 3.0
self.originalname = originalname
@classmethod
def from_parent(cls, parent, **kw): # todo: determine sound type limitations
"""
The public constructor
"""
return super().from_parent(parent=parent, **kw)
def _initrequest(self):
self.funcargs = {}
self._request = fixtures.FixtureRequest(self)
@property
def function(self):
"underlying python 'function' object"
return getimfunc(self.obj)
def _getobj(self):
name = self.name
i = name.find("[") # parametrization
if i != -1:
name = name[:i]
return getattr(self.parent.obj, name)
@property
def _pyfuncitem(self):
"(compatonly) for code expecting pytest-2.2 style request objects"
return self
@property
def funcargnames(self):
""" alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
warnings.warn(FUNCARGNAMES, stacklevel=2)
return self.fixturenames
def runtest(self) -> None:
""" execute the underlying test function. """
self.ihook.pytest_pyfunc_call(pyfuncitem=self)
def setup(self) -> None:
if isinstance(self.parent, Instance):
self.parent.newinstance()
self.obj = self._getobj()
fixtures.fillfixtures(self)
def _prunetraceback(self, excinfo: ExceptionInfo) -> None:
if hasattr(self, "_obj") and not self.config.getoption("fulltrace", False):
code = _pytest._code.Code(get_real_func(self.obj))
path, firstlineno = code.path, code.firstlineno
traceback = excinfo.traceback
ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
if ntraceback == traceback:
ntraceback = ntraceback.cut(path=path)
if ntraceback == traceback:
ntraceback = ntraceback.filter(filter_traceback)
if not ntraceback:
ntraceback = traceback
excinfo.traceback = ntraceback.filter()
# issue364: mark all but first and last frames to
# only show a single-line message for each frame
if self.config.getoption("tbstyle", "auto") == "auto":
if len(excinfo.traceback) > 2:
for entry in excinfo.traceback[1:-1]:
entry.set_repr_style("short")
def repr_failure(self, excinfo, outerr=None):
assert outerr is None, "XXX outerr usage is deprecated"
style = self.config.getoption("tbstyle", "auto")
if style == "auto":
style = "long"
return self._repr_failure_py(excinfo, style=style)
class FunctionDefinition(Function):
"""
internal hack until we get actual definition nodes instead of the
crappy metafunc hack
"""
def runtest(self) -> None:
raise RuntimeError("function definitions are not supposed to be used")
setup = runtest
| alfredodeza/pytest | src/_pytest/python.py | Python | mit | 58,223 |
from flask import Blueprint
__author__ = 'Manuel Escriche'
urgent = Blueprint('urgent', __name__)
from . import views
| flopezag/fiware-backlog | app/urgent/__init__.py | Python | apache-2.0 | 121 |
from tower import ugettext_lazy as _
APP_STEPS = [
('terms', _('Agreement')),
('manifest', _('Submit')),
('details', _('Details')),
('done', _('Done!')),
# ('next_steps', _('Next Steps')),
]
APP_STEPS_TITLE = dict(APP_STEPS)
# Preview sizes in the format (width, height, type)
APP_PREVIEW_MINIMUMS = (320, 480)
APP_PREVIEW_SIZES = [
(180, 270, 'mobile'),
(700, 1050, 'full'), # Because it's proportional, that's why.
]
MAX_PACKAGED_APP_SIZE = 50 * 1024 * 1024 # 50MB
| Joergen/zamboni | mkt/constants/submit.py | Python | bsd-3-clause | 501 |
import numpy
from chainer import cuda
from chainer import distributions
from chainer import testing
from chainer.testing import array
@testing.parameterize(*testing.product({
'shape': [(3, 2), (1,)],
'is_variable': [True, False],
'sample_shape': [(3, 2), ()],
# 'extreme_values': [True, False],
'extreme_values': [False],
'logit_option': [True, False]
}))
@testing.fix_random()
@testing.with_requires('scipy>=0.19.0')
class TestCategorical(testing.distribution_unittest):
scipy_onebyone = True
def setUp_configure(self):
from scipy import stats
self.dist = distributions.Categorical
self.scipy_dist = stats.multinomial
self.test_targets = set([
"batch_shape", "event_shape", "entropy", "log_prob", "sample"])
if self.logit_option:
if self.extreme_values:
logit = -numpy.inf \
* numpy.ones((3,)+self.shape).astype(numpy.float32)
logit[0] = 0.
logit = numpy.rollaxis(logit, 0, logit.ndim)
else:
logit = numpy.random.normal(
size=self.shape+(3,)).astype(numpy.float32)
p = numpy.exp(logit)
p /= numpy.expand_dims(p.sum(axis=-1), axis=-1)
self.params = {"logit": logit}
else:
if self.extreme_values:
p = numpy.zeros((3,)+self.shape).astype(numpy.float32)
p[0] = 1.
p = numpy.rollaxis(p, 0, p.ndim)
else:
logit = numpy.random.normal(
size=self.shape+(3,)).astype(numpy.float32)
p = numpy.exp(logit)
p /= numpy.expand_dims(p.sum(axis=-1), axis=-1)
self.params = {"p": p}
n = numpy.ones(self.shape)
self.scipy_params = {"n": n, "p": p}
self.continuous = False
self.old_settings = None
if self.extreme_values:
self.old_settings = numpy.seterr(divide='ignore', invalid='ignore')
def tearDown(self):
if self.old_settings is not None:
numpy.seterr(**self.old_settings)
def sample_for_test(self):
smp = numpy.random.randint(
0, 3, self.sample_shape + self.shape).astype(numpy.int32)
return smp
def check_log_prob(self, is_gpu):
smp = self.sample_for_test()
if is_gpu:
log_prob1 = self.gpu_dist.log_prob(cuda.to_gpu(smp)).data
else:
log_prob1 = self.cpu_dist.log_prob(smp).data
scipy_prob = self.scipy_dist.logpmf
onebyone_smp = smp.reshape(
(int(numpy.prod(self.sample_shape)),
numpy.prod(self.shape),
int(numpy.prod(self.event_shape))))
onebyone_smp = numpy.swapaxes(onebyone_smp, 0, 1)
onebyone_smp = onebyone_smp.reshape((-1,) + self.sample_shape
+ self.event_shape)
log_prob2 = []
for one_params, one_smp in zip(
self.scipy_onebyone_params_iter(), onebyone_smp):
one_smp = numpy.eye(3)[one_smp]
log_prob2.append(scipy_prob(one_smp, **one_params))
log_prob2 = numpy.vstack(log_prob2)
log_prob2 = log_prob2.reshape(numpy.prod(self.shape), -1).T
log_prob2 = log_prob2.reshape(self.sample_shape + self.shape)
array.assert_allclose(log_prob1, log_prob2)
def check_sample(self, is_gpu):
if is_gpu:
smp1 = self.gpu_dist.sample(
sample_shape=(100000,)+self.sample_shape).data
else:
smp1 = self.cpu_dist.sample(
sample_shape=(100000,)+self.sample_shape).data
smp2 = []
for one_params in self.scipy_onebyone_params_iter():
smp2.append(self.scipy_dist.rvs(
size=(100000,)+self.sample_shape, **one_params))
smp2 = numpy.vstack(smp2)
smp2 = smp2.dot(numpy.arange(3))
smp2 = smp2.reshape((numpy.prod(self.shape), 100000)
+ self.sample_shape
+ self.cpu_dist.event_shape)
smp2 = numpy.rollaxis(
smp2, 0, smp2.ndim-len(self.cpu_dist.event_shape))
smp2 = smp2.reshape((100000,) + self.sample_shape + self.shape
+ self.cpu_dist.event_shape)
array.assert_allclose(smp1.mean(axis=0), smp2.mean(axis=0),
atol=3e-2, rtol=3e-2)
array.assert_allclose(smp1.std(axis=0), smp2.std(axis=0),
atol=3e-2, rtol=3e-2)
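# Illustrative sketch (not part of the test suite): direct use of the distribution
# exercised above; the shapes and probabilities here are arbitrary assumptions.
def _example_direct_categorical_usage():
    p = numpy.full((4, 3), 1.0 / 3, dtype=numpy.float32)
    d = distributions.Categorical(p=p)
    smp = d.sample(sample_shape=(10,))   # Variable holding integer class indices
    return d.log_prob(smp.data)          # log-probability of the drawn indices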
testing.run_module(__name__, __file__)
| ktnyt/chainer | tests/chainer_tests/distributions_tests/test_categorical.py | Python | mit | 4,637 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""train the dialogue model."""
import math
import os
import time
import tensorflow.compat.v1 as tf
import model as diag_model
import model_helper
from utils import misc_utils as utils
def train(hparams, identity, scope=None, target_session=""):
"""main loop to train the dialogue model. identity is used."""
out_dir = hparams.out_dir
steps_per_stats = hparams.steps_per_stats
steps_per_internal_eval = 3 * steps_per_stats
model_creator = diag_model.Model
train_model = model_helper.create_train_model(model_creator, hparams, scope)
model_dir = hparams.out_dir
# Log and output files
log_file = os.path.join(out_dir, identity+"log_%d" % time.time())
log_f = tf.gfile.GFile(log_file, mode="a")
utils.print_out("# log_file=%s" % log_file, log_f)
avg_step_time = 0.0
# load TensorFlow session and model
config_proto = utils.get_config_proto(
log_device_placement=hparams.log_device_placement,
allow_soft_placement=True)
train_sess = tf.Session(
target=target_session, config=config_proto, graph=train_model.graph)
train_handle = train_sess.run(train_model.train_iterator.string_handle())
with train_model.graph.as_default():
loaded_train_model, global_step = model_helper.create_or_load_model(
train_model.model, model_dir, train_sess, "train")
# initialize summary writer
summary_writer = tf.summary.FileWriter(
os.path.join(out_dir, "train_log"), train_model.graph)
last_stats_step = global_step
last_eval_step = global_step
# initialize training stats.
step_time, checkpoint_loss, checkpoint_predict_count = 0.0, 0.0, 0.0
checkpoint_total_count = 0.0
speed, train_ppl = 0.0, 0.0
start_train_time = time.time()
utils.print_out(
"# Start step %d, lr %g, %s" %
(global_step, loaded_train_model.learning_rate.eval(session=train_sess),
time.ctime()),
log_f)
# initialize iterators
skip_count = hparams.batch_size * hparams.epoch_step
utils.print_out("# Init train iterator, skipping %d elements" % skip_count)
train_sess.run(
train_model.train_iterator.initializer,
feed_dict={train_model.skip_count_placeholder: skip_count})
# main training loop
while global_step < hparams.num_train_steps:
start_time = time.time()
try: # run a step
step_result = loaded_train_model.train(train_sess, train_handle)
(_, step_loss, all_summaries, step_predict_count, step_summary,
global_step, step_word_count, batch_size, _, _, words1, words2, mask1,
mask2) = step_result
hparams.epoch_step += 1
except tf.errors.OutOfRangeError: # finished an epoch
hparams.epoch_step = 0
utils.print_out("# Finished an epoch, step %d." % global_step)
train_sess.run(
train_model.train_iterator.initializer,
feed_dict={train_model.skip_count_placeholder: 0})
continue
# Write step summary.
summary_writer.add_summary(step_summary, global_step)
for key in all_summaries:
utils.add_summary(summary_writer, global_step, key, all_summaries[key])
# update statistics
step_time += (time.time() - start_time)
checkpoint_loss += (step_loss * batch_size)
checkpoint_predict_count += step_predict_count
checkpoint_total_count += float(step_word_count)
if global_step - last_stats_step >= steps_per_stats:
# print statistics for the previous epoch and save the model.
last_stats_step = global_step
avg_step_time = step_time / steps_per_stats
utils.add_summary(summary_writer, global_step, "step_time", avg_step_time)
train_ppl = utils.safe_exp(checkpoint_loss / checkpoint_predict_count)
speed = checkpoint_total_count / (1000 * step_time)
if math.isnan(train_ppl):
break
# Reset timer and loss.
step_time, checkpoint_loss, checkpoint_predict_count = 0.0, 0.0, 0.0
checkpoint_total_count = 0.0
# save the model
loaded_train_model.saver.save(
train_sess,
os.path.join(out_dir, "dialogue.ckpt"),
global_step=global_step)
# print the dialogue if in debug mode
if hparams.debug:
utils.print_current_dialogue(words1, words2, mask1, mask2)
# write out internal evaluation
if global_step - last_eval_step >= steps_per_internal_eval:
last_eval_step = global_step
utils.print_out("# Internal Evaluation. global step %d" % global_step)
utils.add_summary(summary_writer, global_step, "train_ppl", train_ppl)
# finished training
loaded_train_model.saver.save(
train_sess,
os.path.join(out_dir, "dialogue.ckpt"),
global_step=global_step)
result_summary = ""
utils.print_out(
"# Final, step %d lr %g "
"step-time %.2f wps %.2fK ppl %.2f, %s, %s" %
(global_step, loaded_train_model.learning_rate.eval(session=train_sess),
avg_step_time, speed, train_ppl, result_summary, time.ctime()),
log_f)
utils.print_time("# Done training!", start_train_time)
utils.print_out("# Start evaluating saved best models.")
summary_writer.close()
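# Illustrative sketch (not part of this module): how train() is typically driven. The
# hparams fields named here are the ones read above; their construction lives elsewhere
# in the project, so this call sequence is an assumption.
#
#     hparams = load_hparams(...)        # must provide out_dir, steps_per_stats,
#     hparams.epoch_step = 0             # num_train_steps, batch_size, debug, ...
#     train(hparams, identity="run1_")   # identity prefixes the log file name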
| google/airdialogue_model | train.py | Python | apache-2.0 | 5,663 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import get_single_element
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class GetSingleElementTest(test_base.DatasetTestBase, parameterized.TestCase):
@parameterized.named_parameters(
("Zero", 0, 1),
("Five", 5, 1),
("Ten", 10, 1),
("Empty", 100, 1, errors.InvalidArgumentError, "Dataset was empty."),
("MoreThanOne", 0, 2, errors.InvalidArgumentError,
"Dataset had more than one element."),
)
def testGetSingleElement(self, skip, take, error=None, error_msg=None):
def make_sparse(x):
x_1d = array_ops.reshape(x, [1])
x_2d = array_ops.reshape(x, [1, 1])
return sparse_tensor.SparseTensor(x_2d, x_1d, x_1d)
dataset = dataset_ops.Dataset.range(100).skip(
skip).map(lambda x: (x * x, make_sparse(x))).take(take)
if error is None:
dense_val, sparse_val = self.evaluate(
get_single_element.get_single_element(dataset))
self.assertEqual(skip * skip, dense_val)
self.assertAllEqual([[skip]], sparse_val.indices)
self.assertAllEqual([skip], sparse_val.values)
self.assertAllEqual([skip], sparse_val.dense_shape)
else:
with self.assertRaisesRegexp(error, error_msg):
self.evaluate(get_single_element.get_single_element(dataset))
def testWindow(self):
"""Test that `get_single_element()` can consume a nested dataset."""
def flat_map_func(ds):
batched = ds.batch(2)
element = get_single_element.get_single_element(batched)
return dataset_ops.Dataset.from_tensors(element)
dataset = dataset_ops.Dataset.range(10).window(2).flat_map(flat_map_func)
self.assertDatasetProduces(
dataset, [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]])
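# Illustrative sketch (not part of the test suite): non-test usage of the API exercised
# above -- reducing a dataset that is known to contain exactly one element.
def _example_get_single_element():
  dataset = dataset_ops.Dataset.range(10).batch(10)  # a single batch -> one element
  return get_single_element.get_single_element(dataset)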
if __name__ == "__main__":
test.main()
| hfp/tensorflow-xsmm | tensorflow/python/data/experimental/kernel_tests/get_single_element_test.py | Python | apache-2.0 | 3,032 |
#-*- coding:utf-8 -*-
"""
This file is part of OpenSesame.
OpenSesame is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenSesame is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenSesame. If not, see <http://www.gnu.org/licenses/>.
"""
from libopensesame.sketchpad import sketchpad as sketchpad_runtime
from libqtopensesame.items.qtplugin import qtplugin
from libqtopensesame.items.feedpad import feedpad
class sketchpad(feedpad, qtplugin, sketchpad_runtime):
"""
desc:
The sketchpad controls are implemented in feedpad.
"""
def __init__(self, name, experiment, string=None):
sketchpad_runtime.__init__(self, name, experiment, string)
qtplugin.__init__(self)
def init_edit_widget(self):
"""
desc:
Initializes the widget.
"""
feedpad.init_edit_widget(self)
self.sketchpad_widget.ui.widget_settings_reset_variables.hide()
| amazinger2013/OpenSesame | libqtopensesame/items/sketchpad.py | Python | gpl-3.0 | 1,289 |
# Copyright 1999-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from __future__ import division
import errno
import logging
import os
import select
import signal
import sys
import time
try:
import fcntl
except ImportError:
# http://bugs.jython.org/issue1074
fcntl = None
try:
import threading
except ImportError:
import dummy_threading as threading
from portage.util import writemsg_level
from ..SlotObject import SlotObject
from .PollConstants import PollConstants
from .PollSelectAdapter import PollSelectAdapter
class EventLoop(object):
"""
An event loop, intended to be compatible with the GLib event loop.
Call the iteration method in order to execute one iteration of the
loop. The idle_add and timeout_add methods serve as thread-safe
means to interact with the loop's thread.
"""
supports_multiprocessing = True
# TODO: Find out why SIGCHLD signals aren't delivered during poll
# calls, forcing us to wakeup in order to receive them.
_sigchld_interval = 250
class _child_callback_class(SlotObject):
__slots__ = ("callback", "data", "pid", "source_id")
class _idle_callback_class(SlotObject):
__slots__ = ("args", "callback", "calling", "source_id")
class _io_handler_class(SlotObject):
__slots__ = ("args", "callback", "f", "source_id")
class _timeout_handler_class(SlotObject):
__slots__ = ("args", "function", "calling", "interval", "source_id",
"timestamp")
def __init__(self, main=True):
"""
@param main: If True then this is a singleton instance for use
in the main thread, otherwise it is a local instance which
can safely be use in a non-main thread (default is True, so
that global_event_loop does not need constructor arguments)
@type main: bool
"""
self._use_signal = main and fcntl is not None
self._thread_rlock = threading.RLock()
self._thread_condition = threading.Condition(self._thread_rlock)
self._poll_event_queue = []
self._poll_event_handlers = {}
self._poll_event_handler_ids = {}
# Increment id for each new handler.
self._event_handler_id = 0
self._idle_callbacks = {}
self._timeout_handlers = {}
self._timeout_interval = None
self._poll_obj = None
try:
select.epoll
except AttributeError:
pass
else:
try:
epoll_obj = select.epoll()
except IOError:
# This happens with Linux 2.4 kernels:
# IOError: [Errno 38] Function not implemented
pass
else:
# FD_CLOEXEC is enabled by default in Python >=3.4.
if sys.hexversion < 0x3040000 and fcntl is not None:
try:
fcntl.FD_CLOEXEC
except AttributeError:
pass
else:
fcntl.fcntl(epoll_obj.fileno(), fcntl.F_SETFD,
fcntl.fcntl(epoll_obj.fileno(),
fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
self._poll_obj = _epoll_adapter(epoll_obj)
self.IO_ERR = select.EPOLLERR
self.IO_HUP = select.EPOLLHUP
self.IO_IN = select.EPOLLIN
self.IO_NVAL = 0
self.IO_OUT = select.EPOLLOUT
self.IO_PRI = select.EPOLLPRI
if self._poll_obj is None:
self._poll_obj = create_poll_instance()
self.IO_ERR = PollConstants.POLLERR
self.IO_HUP = PollConstants.POLLHUP
self.IO_IN = PollConstants.POLLIN
self.IO_NVAL = PollConstants.POLLNVAL
self.IO_OUT = PollConstants.POLLOUT
self.IO_PRI = PollConstants.POLLPRI
self._child_handlers = {}
self._sigchld_read = None
self._sigchld_write = None
self._sigchld_src_id = None
self._pid = os.getpid()
def _new_source_id(self):
"""
Generate a new source id. This method is thread-safe.
"""
with self._thread_rlock:
self._event_handler_id += 1
return self._event_handler_id
def _poll(self, timeout=None):
"""
All poll() calls pass through here. The poll events
are added directly to self._poll_event_queue.
In order to avoid endless blocking, this raises
StopIteration if timeout is None and there are
no file descriptors to poll.
"""
if timeout is None and \
not self._poll_event_handlers:
raise StopIteration(
"timeout is None and there are no poll() event handlers")
while True:
try:
self._poll_event_queue.extend(self._poll_obj.poll(timeout))
break
except (IOError, select.error) as e:
# Silently handle EINTR, which is normal when we have
# received a signal such as SIGINT (epoll objects may
# raise IOError rather than select.error, at least in
# Python 3.2).
if not (e.args and e.args[0] == errno.EINTR):
writemsg_level("\n!!! select error: %s\n" % (e,),
level=logging.ERROR, noiselevel=-1)
del e
# This typically means that we've received a SIGINT, so
# raise StopIteration in order to break out of our current
# iteration and respond appropriately to the signal as soon
# as possible.
raise StopIteration("interrupted")
def iteration(self, *args):
"""
Like glib.MainContext.iteration(), runs a single iteration. In order
to avoid blocking forever when may_block is True (the default),
callers must be careful to ensure that at least one of the following
conditions is met:
1) An event source or timeout is registered which is guaranteed
		   to trigger at least one event (a call to an idle function
only counts as an event if it returns a False value which
causes it to stop being called)
2) Another thread is guaranteed to call one of the thread-safe
methods which notify iteration to stop waiting (such as
idle_add or timeout_add).
These rules ensure that iteration is able to block until an event
arrives, without doing any busy waiting that would waste CPU time.
@type may_block: bool
@param may_block: if True the call may block waiting for an event
(default is True).
@rtype: bool
@return: True if events were dispatched.
"""
may_block = True
if args:
if len(args) > 1:
raise TypeError(
"expected at most 1 argument (%s given)" % len(args))
may_block = args[0]
event_queue = self._poll_event_queue
event_handlers = self._poll_event_handlers
events_handled = 0
timeouts_checked = False
if not event_handlers:
with self._thread_condition:
if self._run_timeouts():
events_handled += 1
timeouts_checked = True
if not event_handlers and not events_handled and may_block:
# Block so that we don't waste cpu time by looping too
# quickly. This makes EventLoop useful for code that needs
# to wait for timeout callbacks regardless of whether or
# not any IO handlers are currently registered.
timeout = self._get_poll_timeout()
if timeout is None:
wait_timeout = None
else:
wait_timeout = timeout / 1000
# NOTE: In order to avoid a possible infinite wait when
# wait_timeout is None, the previous _run_timeouts()
# call must have returned False *with* _thread_condition
# acquired. Otherwise, we would risk going to sleep after
# our only notify event has already passed.
self._thread_condition.wait(wait_timeout)
if self._run_timeouts():
events_handled += 1
timeouts_checked = True
# If any timeouts have executed, then return immediately,
# in order to minimize latency in termination of iteration
# loops that they may control.
if events_handled or not event_handlers:
return bool(events_handled)
if not event_queue:
if may_block:
timeout = self._get_poll_timeout()
# Avoid blocking for IO if there are any timeout
# or idle callbacks available to process.
if timeout != 0 and not timeouts_checked:
if self._run_timeouts():
events_handled += 1
timeouts_checked = True
if events_handled:
# Minimize latency for loops controlled
# by timeout or idle callback events.
timeout = 0
else:
timeout = 0
try:
self._poll(timeout=timeout)
except StopIteration:
# This can be triggered by EINTR which is caused by signals.
pass
# NOTE: IO event handlers may be re-entrant, in case something
# like AbstractPollTask._wait_loop() needs to be called inside
# a handler for some reason.
while event_queue:
events_handled += 1
f, event = event_queue.pop()
try:
x = event_handlers[f]
except KeyError:
# This is known to be triggered by the epoll
# implementation in qemu-user-1.2.2, and appears
# to be harmless (see bug #451326).
continue
if not x.callback(f, event, *x.args):
self.source_remove(x.source_id)
if not timeouts_checked:
if self._run_timeouts():
events_handled += 1
timeouts_checked = True
return bool(events_handled)
def _get_poll_timeout(self):
with self._thread_rlock:
if self._child_handlers:
if self._timeout_interval is None:
timeout = self._sigchld_interval
else:
timeout = min(self._sigchld_interval,
self._timeout_interval)
else:
timeout = self._timeout_interval
return timeout
def child_watch_add(self, pid, callback, data=None):
"""
Like glib.child_watch_add(), sets callback to be called with the
user data specified by data when the child indicated by pid exits.
The signature for the callback is:
def callback(pid, condition, user_data)
		where pid is the child process id, condition is the status
information about the child process and user_data is data.
		@type pid: int
@param pid: process id of a child process to watch
@type callback: callable
@param callback: a function to call
@type data: object
@param data: the optional data to pass to function
@rtype: int
@return: an integer ID
"""
source_id = self._new_source_id()
self._child_handlers[source_id] = self._child_callback_class(
callback=callback, data=data, pid=pid, source_id=source_id)
if self._use_signal:
if self._sigchld_read is None:
self._sigchld_read, self._sigchld_write = os.pipe()
fcntl.fcntl(self._sigchld_read, fcntl.F_SETFL,
fcntl.fcntl(self._sigchld_read,
fcntl.F_GETFL) | os.O_NONBLOCK)
# FD_CLOEXEC is enabled by default in Python >=3.4.
if sys.hexversion < 0x3040000:
try:
fcntl.FD_CLOEXEC
except AttributeError:
pass
else:
fcntl.fcntl(self._sigchld_read, fcntl.F_SETFD,
fcntl.fcntl(self._sigchld_read,
fcntl.F_GETFD) | fcntl.FD_CLOEXEC)
# The IO watch is dynamically registered and unregistered as
# needed, since we don't want to consider it as a valid source
# of events when there are no child listeners. It's important
# to distinguish when there are no valid sources of IO events,
# in order to avoid an endless poll call if there's no timeout.
if self._sigchld_src_id is None:
self._sigchld_src_id = self.io_add_watch(
self._sigchld_read, self.IO_IN, self._sigchld_io_cb)
signal.signal(signal.SIGCHLD, self._sigchld_sig_cb)
# poll now, in case the SIGCHLD has already arrived
self._poll_child_processes()
return source_id
def _sigchld_sig_cb(self, signum, frame):
# If this signal handler was not installed by the
# current process then the signal doesn't belong to
# this EventLoop instance.
if os.getpid() == self._pid:
os.write(self._sigchld_write, b'\0')
def _sigchld_io_cb(self, fd, events):
try:
while True:
os.read(self._sigchld_read, 4096)
except OSError:
# read until EAGAIN
pass
self._poll_child_processes()
return True
def _poll_child_processes(self):
if not self._child_handlers:
return False
calls = 0
for x in list(self._child_handlers.values()):
if x.source_id not in self._child_handlers:
# it's already been called via re-entrance
continue
try:
wait_retval = os.waitpid(x.pid, os.WNOHANG)
except OSError as e:
if e.errno != errno.ECHILD:
raise
del e
self.source_remove(x.source_id)
else:
# With waitpid and WNOHANG, only check the
# first element of the tuple since the second
# element may vary (bug #337465).
if wait_retval[0] != 0:
calls += 1
self.source_remove(x.source_id)
x.callback(x.pid, wait_retval[1], x.data)
return bool(calls)
def idle_add(self, callback, *args):
"""
Like glib.idle_add(), if callback returns False it is
automatically removed from the list of event sources and will
not be called again. This method is thread-safe.
@type callback: callable
@param callback: a function to call
@rtype: int
@return: an integer ID
"""
with self._thread_condition:
source_id = self._new_source_id()
self._idle_callbacks[source_id] = self._idle_callback_class(
args=args, callback=callback, source_id=source_id)
self._thread_condition.notify()
return source_id
def _run_idle_callbacks(self):
# assumes caller has acquired self._thread_rlock
if not self._idle_callbacks:
return False
state_change = 0
		# Iterate over our local list, since self._idle_callbacks can be
		# modified during the execution of these callbacks.
for x in list(self._idle_callbacks.values()):
if x.source_id not in self._idle_callbacks:
# it got cancelled while executing another callback
continue
if x.calling:
# don't call it recursively
continue
x.calling = True
try:
if not x.callback(*x.args):
state_change += 1
self.source_remove(x.source_id)
finally:
x.calling = False
return bool(state_change)
def timeout_add(self, interval, function, *args):
"""
Like glib.timeout_add(), interval argument is the number of
milliseconds between calls to your function, and your function
should return False to stop being called, or True to continue
being called. Any additional positional arguments given here
are passed to your function when it's called. This method is
thread-safe.
"""
with self._thread_condition:
source_id = self._new_source_id()
self._timeout_handlers[source_id] = \
self._timeout_handler_class(
interval=interval, function=function, args=args,
source_id=source_id, timestamp=time.time())
if self._timeout_interval is None or \
self._timeout_interval > interval:
self._timeout_interval = interval
self._thread_condition.notify()
return source_id
def _run_timeouts(self):
calls = 0
if not self._use_signal:
if self._poll_child_processes():
calls += 1
with self._thread_rlock:
if self._run_idle_callbacks():
calls += 1
if not self._timeout_handlers:
return bool(calls)
ready_timeouts = []
current_time = time.time()
for x in self._timeout_handlers.values():
elapsed_seconds = current_time - x.timestamp
# elapsed_seconds < 0 means the system clock has been adjusted
if elapsed_seconds < 0 or \
(x.interval - 1000 * elapsed_seconds) <= 0:
ready_timeouts.append(x)
		# Iterate over our local list, since self._timeout_handlers can be
		# modified during the execution of these callbacks.
for x in ready_timeouts:
if x.source_id not in self._timeout_handlers:
# it got cancelled while executing another timeout
continue
if x.calling:
# don't call it recursively
continue
calls += 1
x.calling = True
try:
x.timestamp = time.time()
if not x.function(*x.args):
self.source_remove(x.source_id)
finally:
x.calling = False
return bool(calls)
def io_add_watch(self, f, condition, callback, *args):
"""
Like glib.io_add_watch(), your function should return False to
stop being called, or True to continue being called. Any
additional positional arguments given here are passed to your
function when it's called.
@type f: int or object with fileno() method
@param f: a file descriptor to monitor
@type condition: int
@param condition: a condition mask
@type callback: callable
@param callback: a function to call
@rtype: int
@return: an integer ID of the event source
"""
if f in self._poll_event_handlers:
raise AssertionError("fd %d is already registered" % f)
source_id = self._new_source_id()
self._poll_event_handler_ids[source_id] = f
self._poll_event_handlers[f] = self._io_handler_class(
args=args, callback=callback, f=f, source_id=source_id)
self._poll_obj.register(f, condition)
return source_id
def source_remove(self, reg_id):
"""
Like glib.source_remove(), this returns True if the given reg_id
is found and removed, and False if the reg_id is invalid or has
already been removed.
"""
x = self._child_handlers.pop(reg_id, None)
if x is not None:
if not self._child_handlers and self._use_signal:
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
self.source_remove(self._sigchld_src_id)
self._sigchld_src_id = None
return True
with self._thread_rlock:
idle_callback = self._idle_callbacks.pop(reg_id, None)
if idle_callback is not None:
return True
timeout_handler = self._timeout_handlers.pop(reg_id, None)
if timeout_handler is not None:
if timeout_handler.interval == self._timeout_interval:
if self._timeout_handlers:
self._timeout_interval = min(x.interval
for x in self._timeout_handlers.values())
else:
self._timeout_interval = None
return True
f = self._poll_event_handler_ids.pop(reg_id, None)
if f is None:
return False
self._poll_obj.unregister(f)
if self._poll_event_queue:
# Discard any unhandled events that belong to this file,
# in order to prevent these events from being erroneously
# delivered to a future handler that is using a reallocated
# file descriptor of the same numeric value (causing
# extremely confusing bugs).
remaining_events = []
discarded_events = False
for event in self._poll_event_queue:
if event[0] == f:
discarded_events = True
else:
remaining_events.append(event)
if discarded_events:
self._poll_event_queue[:] = remaining_events
del self._poll_event_handlers[f]
return True
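# Illustrative sketch (not part of portage): driving an EventLoop manually through the
# thread-safe registration methods defined above; names and iteration count are hypothetical.
def _example_timeout_usage(iterations=3):
	loop = EventLoop(main=False)
	ticks = []
	def tick():
		ticks.append(len(ticks))
		return True  # keep the timeout registered
	source_id = loop.timeout_add(0, tick)
	for _ in range(iterations):
		loop.iteration()  # dispatches timeouts, idle callbacks and IO handlers
	loop.source_remove(source_id)
	return ticks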
_can_poll_device = None
def can_poll_device():
"""
Test if it's possible to use poll() on a device such as a pty. This
is known to fail on Darwin.
@rtype: bool
@return: True if poll() on a device succeeds, False otherwise.
"""
global _can_poll_device
if _can_poll_device is not None:
return _can_poll_device
if not hasattr(select, "poll"):
_can_poll_device = False
return _can_poll_device
try:
dev_null = open('/dev/null', 'rb')
except IOError:
_can_poll_device = False
return _can_poll_device
p = select.poll()
try:
p.register(dev_null.fileno(), PollConstants.POLLIN)
except TypeError:
# Jython: Object 'org.python.core.io.FileIO@f8f175' is not watchable
_can_poll_device = False
return _can_poll_device
invalid_request = False
for f, event in p.poll():
if event & PollConstants.POLLNVAL:
invalid_request = True
break
dev_null.close()
_can_poll_device = not invalid_request
return _can_poll_device
def create_poll_instance():
"""
Create an instance of select.poll, or an instance of
	PollSelectAdapter if there is no poll() implementation or
it is broken somehow.
"""
if can_poll_device():
return select.poll()
return PollSelectAdapter()
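# Illustrative sketch (not part of portage): create_poll_instance() hides the platform
# differences probed above; the returned object is used through the poll-style interface.
def _example_poll_readable(fd):
	poll_obj = create_poll_instance()
	poll_obj.register(fd, PollConstants.POLLIN)
	return poll_obj.poll(0)  # non-blocking; returns a list of (fd, event) pairs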
class _epoll_adapter(object):
"""
Wraps a select.epoll instance in order to make it compatible
	with select.poll instances. This is necessary since epoll instances
	interpret timeout arguments differently (seconds rather than
	milliseconds). Note that the file descriptor
that is associated with an epoll instance will close automatically when
it is garbage collected, so it's not necessary to close it explicitly.
"""
__slots__ = ('_epoll_obj',)
def __init__(self, epoll_obj):
self._epoll_obj = epoll_obj
def register(self, fd, *args):
self._epoll_obj.register(fd, *args)
def unregister(self, fd):
self._epoll_obj.unregister(fd)
def poll(self, *args):
if len(args) > 1:
raise TypeError(
"poll expected at most 2 arguments, got " + \
repr(1 + len(args)))
timeout = -1
if args:
timeout = args[0]
if timeout is None or timeout < 0:
timeout = -1
elif timeout != 0:
timeout = timeout / 1000
return self._epoll_obj.poll(timeout)
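# Illustrative sketch (not part of the original module): exercising the helpers
# above on a pipe. PollConstants is assumed to be imported earlier in this
# module (it is already used by can_poll_device); the block never runs on import.
if __name__ == '__main__':
	import os
	read_fd, write_fd = os.pipe()
	poller = create_poll_instance()  # select.poll() or a PollSelectAdapter fallback
	poller.register(read_fd, PollConstants.POLLIN)
	os.write(write_fd, b'x')
	# poll() returns (fd, event-mask) pairs, mirroring the select.poll interface
	print(poller.poll(0))
	os.close(read_fd)
	os.close(write_fd)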
| ptisserand/portage | pym/portage/util/_eventloop/EventLoop.py | Python | gpl-2.0 | 19,948 |
#!/usr/bin/python3
class Formatter(object):
"""Formatter class"""
def __init__(self):
self.formatters = {}
def add(self, formatter):
"""add new formatter"""
self.formatters[formatter.get_name()] = formatter
def format(self, name, content):
"""calls proper formatter and returns content"""
return self.formatters[name].format(content)
def get_names(self):
return self.formatters.keys()
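# Illustrative usage sketch (not part of the original module). The expected
# formatter interface (get_name()/format()) is inferred from the calls above;
# UpperCaseFormatter is a hypothetical example, not a real plugin of this project.
if __name__ == '__main__':
    class UpperCaseFormatter(object):
        def get_name(self):
            return "upper"
        def format(self, content):
            return content.upper()
    registry = Formatter()
    registry.add(UpperCaseFormatter())
    print(registry.format("upper", "hello lcd"))  # -> HELLO LCD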
| bkosciow/proxy_lcd | service/format.py | Python | mit | 458 |
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: send_sms_to_multiple_recipients_sample.py
DESCRIPTION:
This sample demonstrates sending an SMS message to multiple recipients. The SMS client is
authenticated using a connection string.
USAGE:
python send_sms_to_multiple_recipients_sample.py
    Set the environment variables with your own values before running the sample:
1) COMMUNICATION_SAMPLES_CONNECTION_STRING - the connection string in your ACS resource
2) AZURE_PHONE_NUMBER - a phone number with SMS capabilities in your ACS resource
"""
import os
import sys
from azure.communication.sms import SmsClient
sys.path.append("..")
class SmsMultipleRecipientsSample(object):
connection_string = os.getenv("COMMUNICATION_SAMPLES_CONNECTION_STRING")
phone_number = os.getenv("AZURE_PHONE_NUMBER")
def send_sms_to_multiple_recipients(self):
sms_client = SmsClient.from_connection_string(self.connection_string)
# calling send() with sms values
sms_responses = sms_client.send(
from_=self.phone_number,
to=[self.phone_number, self.phone_number],
message="Hello World via SMS",
enable_delivery_report=True, # optional property
tag="custom-tag") # optional property
for sms_response in sms_responses:
if (sms_response.successful):
print("Message with message id {} was successful sent to {}"
.format(sms_response.message_id, sms_response.to))
else:
print("Message failed to send to {} with the status code {} and error: {}"
.format(sms_response.to, sms_response.http_status_code, sms_response.error_message))
if __name__ == '__main__':
sample = SmsMultipleRecipientsSample()
sample.send_sms_to_multiple_recipients()
| Azure/azure-sdk-for-python | sdk/communication/azure-communication-sms/samples/send_sms_to_multiple_recipients_sample.py | Python | mit | 2,146 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.extensions_v1beta1_deployment_strategy import ExtensionsV1beta1DeploymentStrategy
class TestExtensionsV1beta1DeploymentStrategy(unittest.TestCase):
""" ExtensionsV1beta1DeploymentStrategy unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testExtensionsV1beta1DeploymentStrategy(self):
"""
Test ExtensionsV1beta1DeploymentStrategy
"""
model = kubernetes.client.models.extensions_v1beta1_deployment_strategy.ExtensionsV1beta1DeploymentStrategy()
if __name__ == '__main__':
unittest.main()
| skuda/client-python | kubernetes/test/test_extensions_v1beta1_deployment_strategy.py | Python | apache-2.0 | 1,021 |
from settings import *
import dj_database_url
import sys
import os
from json import load
DEBUG = False
print >> sys.stderr, "Using Heroku Settings"
try:
APP_INFO = load(open(BASE_DIR + "/app_info.json"))['staging']
except:
print "Failed to load app_info.json"
APP_INFO = {}
print "using appinfo: ", APP_INFO
if APP_INFO.get('project_name') and APP_INFO.get('branch_name'):
STATIC_PREPEND_PATH = '/{}/{}'.format(APP_INFO.get('project_name'), APP_INFO.get('branch_name'))
else:
STATIC_PREPEND_PATH = ''
DATABASES = {
'default': dj_database_url.config(default='postgres://localhost'),
}
# this setting can be set to False after setting up static file serving through a CDN
SERVE_STATIC = True
# AWS settings
AWS_ACCESS_KEY_ID = os.environ.get('HAUS_AWS_ACCESS_KEY_ID','')
AWS_SECRET_ACCESS_KEY = os.environ.get('HAUS_AWS_SECRET_ACCESS_KEY','')
AWS_BUCKET_NAME = AWS_STORAGE_BUCKET_NAME = '__BUCKET_NAME__-staging'
# suppress bucket auth via accesskeys
AWS_QUERYSTRING_AUTH = False
ASSET_PROTOCOL = 'https' if USE_HTTPS_FOR_ASSETS else 'http'
USE_RELATIVE_STATIC_URL = os.environ.get('USE_RELATIVE_STATIC_URL', False)
if USE_RELATIVE_STATIC_URL:
STATIC_URL = '/'
MEDIA_URL = '/uploads/'
else:
STATIC_URL = '{}://s3.amazonaws.com/{}/'.format(ASSET_PROTOCOL, AWS_STORAGE_BUCKET_NAME)
MEDIA_URL = '{}://s3.amazonaws.com/{}/uploads/'.format(ASSET_PROTOCOL, AWS_STORAGE_BUCKET_NAME)
STATICFILES_STORAGE = 'utils.storage.OptimizedS3BotoStorage'
DEFAULT_FILE_STORAGE = "utils.storage.MediaRootS3BotoStorage"
INSTALLED_APPS += ('storages',)
ALLOWED_HOSTS += ('{}.herokuapp.com'.format(APP_INFO.get('heroku_app_name','')), )
| MadeInHaus/django-template | backend/settings/hosts/staging.py | Python | mit | 1683 |
from django import template
from django.shortcuts import render as template_render
from design.quickmenuregistry import quickmenu_registry
register = template.Library()
@register.tag(name="render_quickmenu")
def render_quickmenu(parser, token):
return QuickMenuNode(token)
class QuickMenuNode(template.Node):
def get_request(self,context):
try:
return template.Variable('request').resolve(context)
except template.VariableDoesNotExist:
return None
def __init__(self, format_string):
self.format_string = format_string
self.registry = quickmenu_registry.list()
def render(self, context):
tpl = template.loader.select_template(['quickmenu.html'])
return tpl.render(template.Context({'menu_items': self.registry}))
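# Illustrative usage note (not part of the original module): in a template the
# tag registered above would typically be used as follows (the load name is
# assumed to match this module's file name):
#
#     {% load quickmenurenderer %}
#     {% render_quickmenu %}
#
# The node then renders 'quickmenu.html' with the registered menu items.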
| django-zfs/django-zfs | design/templatetags/quickmenurenderer.py | Python | gpl-3.0 | 744 |
import json
import click # type: ignore
from collections import OrderedDict
from typing import (
List,
Dict,
TextIO,
Optional,
OrderedDict as tOrderedDict
)
from voluptuous import ( # type: ignore
Schema, Required, MultipleInvalid, ALLOW_EXTRA
)
from ..common_types import SequenceResult, AlignedGeneSeq
GENES: tOrderedDict = OrderedDict([
('PR', 99),
('RT', 560),
('IN', 288)
])
schema: Schema = Schema([{
Required('inputSequence'): {
Required('header'): str
},
Required('alignedGeneSequences'): [{
Required('gene'): {
Required('name'): str
},
Required('firstAA'): int,
Required('lastAA'): int,
Required('alignedNAs'): str,
Required('prettyPairwise'): {
Required('positionLine'): [str],
Required('alignedNAsLine'): [str]
}
}]
}], extra=ALLOW_EXTRA)
@click.option('--gap-handling', default="hxb2strip",
type=click.Choice(['squeeze', 'hxb2strip', 'hxb2stripkeepins']),
help=('Specify how you want the recipe to handle the gaps.\n\n'
'Specify "squeeze" to keep every gap in the result '
'alignment; "hxb2strip" to strip out non-HXB2 columns; '
'"hxb2stripkeepins" to strip not non-HXB2 columns except '
'codon insertions.'))
@click.pass_context
def alignment(ctx: click.Context, gap_handling: str) -> None:
"""Export aligned pol sequences from Sierra result."""
seqheader: str
concat_seqs: str
geneseqs: Dict[str, AlignedGeneSeq]
gene: str
genesize: int
geneseq: Optional[AlignedGeneSeq]
first_aa: int
last_aa: int
naseq: List[str]
posline: List[str]
naline: List[str]
pos: str
nas: str
naseq_text: str
output: TextIO = ctx.obj['OUTPUT']
sequences: List[SequenceResult] = json.load(ctx.obj['INPUT'])
try:
schema(sequences)
except MultipleInvalid as e:
raise click.ClickException(str(e))
for seq in sequences:
seqheader = seq['inputSequence']['header']
concat_seqs = ''
geneseqs = {gs['gene']['name']: gs
for gs in seq['alignedGeneSequences']}
for gene, genesize in GENES.items():
geneseq = geneseqs.get(gene)
if geneseq:
first_aa = geneseq['firstAA']
last_aa = geneseq['lastAA']
if gap_handling.endswith('keepins'):
naseq = []
posline = geneseq['prettyPairwise']['positionLine']
naline = geneseq['prettyPairwise']['alignedNAsLine']
for pos, nas in zip(posline, naline):
if not pos.strip() and ' ' in nas:
# fs insertions
continue
naseq.append(nas)
naseq_text = ''.join(naseq)
else:
naseq_text = geneseq['alignedNAs']
else:
first_aa = 1
last_aa = genesize
naseq_text = '.' * genesize * 3
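            # Worked example (illustrative, not part of the original): with
            # gap_handling='hxb2strip' and gene PR (genesize 99), an alignment
            # covering firstAA=5..lastAA=99 gets (5-1)*3 = 12 leading '.' and
            # (99-99)*3 = 0 trailing '.', so each gene always spans
            # genesize*3 columns in the stripped output.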
if gap_handling.startswith('hxb2strip'):
naseq_text = (
('.' * (first_aa - 1) * 3) +
naseq_text +
'.' * (genesize - last_aa) * 3
)
else: # gap_handling == 'squeeze'
raise NotImplementedError()
concat_seqs += naseq_text
output.write('>{}\n{}\n'.format(seqheader, concat_seqs))
| hivdb/sierra-client | python/sierrapy/recipes/alignment.py | Python | mit | 3,603 |
match x:
case ['foo'
'<caret>bar']:
        pass
| siosio/intellij-community | python/testData/editing/noBackslashOnEnterInStringLiteralInsideSequencePattern.after.py | Python | apache-2.0 | 62 |
#!/usr/bin/env python2
#
# Create a distributable Duktape package into 'dist' directory. The contents
# of this directory can then be packaged into a source distributable.
#
import logging
import sys
logging.basicConfig(level=logging.INFO, stream=sys.stdout, format='%(name)-21s %(levelname)-7s %(message)s')
logger = logging.getLogger('dist.py')
logger.setLevel(logging.INFO)
import os
import re
import json
import shutil
import glob
import optparse
import subprocess
import logging
# Helpers.
def exec_get_stdout(cmd, input=None, default=None, print_stdout=False):
try:
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
ret = proc.communicate(input=input)
if print_stdout:
sys.stdout.write(ret[0])
sys.stdout.flush()
if proc.returncode != 0:
sys.stdout.write(ret[1]) # print stderr on error
sys.stdout.flush()
if default is not None:
logger.warning(' command %r failed, return default' % cmd)
return default
raise Exception('command failed, return code %d: %r' % (proc.returncode, cmd))
return ret[0]
except:
if default is not None:
logger.warning('command %r failed, return default' % cmd)
return default
raise
def exec_print_stdout(cmd, input=None):
ret = exec_get_stdout(cmd, input=input, print_stdout=True)
def mkdir(path):
os.mkdir(path)
def copy_file(src, dst):
with open(src, 'rb') as f_in:
with open(dst, 'wb') as f_out:
f_out.write(f_in.read())
def copy_files(filelist, srcdir, dstdir):
for i in filelist:
copy_file(os.path.join(srcdir, i), os.path.join(dstdir, i))
def copy_and_replace(src, dst, rules):
# Read and write separately to allow in-place replacement
keys = sorted(rules.keys())
res = []
with open(src, 'rb') as f_in:
for line in f_in:
for k in keys:
line = line.replace(k, rules[k])
res.append(line)
with open(dst, 'wb') as f_out:
f_out.write(''.join(res))
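# Illustrative note (not part of the original): copy_and_replace() does a plain
# per-line substring substitution, e.g. (hypothetical file names):
#     copy_and_replace('README.rst.in', 'README.rst', {'@DUK_VERSION@': '20099'})
# The real call sites further below use the same @PLACEHOLDER@ convention.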
def copy_and_cquote(src, dst):
with open(src, 'rb') as f_in:
with open(dst, 'wb') as f_out:
f_out.write('/*\n')
for line in f_in:
line = line.decode('utf-8')
f_out.write(' * ')
for c in line:
if (ord(c) >= 0x20 and ord(c) <= 0x7e) or (c in '\x0a'):
f_out.write(c.encode('ascii'))
else:
f_out.write('\\u%04x' % ord(c))
f_out.write(' */\n')
def read_file(src, strip_last_nl=False):
with open(src, 'rb') as f:
data = f.read()
if len(data) > 0 and data[-1] == '\n':
data = data[:-1]
return data
def delete_matching_files(dirpath, cb):
for fn in os.listdir(dirpath):
if os.path.isfile(os.path.join(dirpath, fn)) and cb(fn):
logger.debug('Deleting %r' % os.path.join(dirpath, fn))
os.unlink(os.path.join(dirpath, fn))
#print('Deleting matching file: %r' % fn)
def glob_files(pattern):
return glob.glob(pattern)
def cstring(x):
return '"' + x + '"' # good enough for now
# Get Duktape version number as an integer. DUK_VERSION is grepped from
# duk_api_public.h.in: it is needed for the public API and we want to avoid
# defining it in multiple places.
def get_duk_version():
r = re.compile(r'^#define\s+DUK_VERSION\s+(.*?)L?\s*$')
with open(os.path.join('src-input', 'duk_api_public.h.in'), 'rb') as f:
for line in f:
m = r.match(line)
if m is not None:
duk_version = int(m.group(1))
duk_major = duk_version / 10000
duk_minor = (duk_version % 10000) / 100
duk_patch = duk_version % 100
duk_version_formatted = '%d.%d.%d' % (duk_major, duk_minor, duk_patch)
return duk_version, duk_major, duk_minor, duk_patch, duk_version_formatted
raise Exception('cannot figure out duktape version')
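# Worked example (illustrative, not part of the original): a header line such as
#     #define DUK_VERSION                       20099L
# gives duk_version 20099, which splits into major 2, minor 0, patch 99 and is
# formatted as '2.0.99'.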
def create_dist_directories(dist):
if os.path.exists(dist):
raise Exception('dist target directory %s already exists, please delete first' % repr(dist))
mkdir(dist)
mkdir(os.path.join(dist, 'src-input'))
mkdir(os.path.join(dist, 'tools'))
mkdir(os.path.join(dist, 'config'))
mkdir(os.path.join(dist, 'extras'))
mkdir(os.path.join(dist, 'extras', 'duk-v1-compat'))
mkdir(os.path.join(dist, 'extras', 'print-alert'))
mkdir(os.path.join(dist, 'extras', 'console'))
mkdir(os.path.join(dist, 'extras', 'logging'))
mkdir(os.path.join(dist, 'extras', 'minimal-printf'))
mkdir(os.path.join(dist, 'extras', 'module-duktape'))
mkdir(os.path.join(dist, 'extras', 'module-node'))
mkdir(os.path.join(dist, 'extras', 'alloc-pool'))
mkdir(os.path.join(dist, 'polyfills'))
#mkdir(os.path.join(dist, 'doc')) # Empty, so omit
mkdir(os.path.join(dist, 'licenses'))
mkdir(os.path.join(dist, 'debugger'))
mkdir(os.path.join(dist, 'debugger', 'static'))
mkdir(os.path.join(dist, 'examples'))
mkdir(os.path.join(dist, 'examples', 'hello'))
mkdir(os.path.join(dist, 'examples', 'eval'))
mkdir(os.path.join(dist, 'examples', 'cmdline'))
mkdir(os.path.join(dist, 'examples', 'eventloop'))
mkdir(os.path.join(dist, 'examples', 'guide'))
mkdir(os.path.join(dist, 'examples', 'coffee'))
mkdir(os.path.join(dist, 'examples', 'jxpretty'))
mkdir(os.path.join(dist, 'examples', 'sandbox'))
mkdir(os.path.join(dist, 'examples', 'alloc-logging'))
mkdir(os.path.join(dist, 'examples', 'alloc-torture'))
mkdir(os.path.join(dist, 'examples', 'alloc-hybrid'))
mkdir(os.path.join(dist, 'examples', 'debug-trans-socket'))
mkdir(os.path.join(dist, 'examples', 'debug-trans-dvalue'))
mkdir(os.path.join(dist, 'examples', 'codepage-conv'))
mkdir(os.path.join(dist, 'examples', 'dummy-date-provider'))
mkdir(os.path.join(dist, 'examples', 'cpp-exceptions'))
# Spot check a few files to ensure we're in Duktape repo root, as dist only
# works from there.
def check_cwd_duktape_repo_root():
if not (os.path.isfile(os.path.join('src-input', 'duk_api_public.h.in')) and \
os.path.isfile(os.path.join('config', 'platforms.yaml'))):
sys.stderr.write('\n')
sys.stderr.write('*** Working directory must be Duktape repo checkout root!\n')
sys.stderr.write('\n')
raise Exception('Incorrect working directory')
# Option parsing.
def parse_options():
parser = optparse.OptionParser()
parser.add_option('--repo-directory', dest='repo_directory', default=None, help='Duktape repo directory (default is CWD)')
parser.add_option('--output-directory', dest='output_directory', default=None, help='Dist output directory (created automatically, must not exist; default is <repo>/dist)')
parser.add_option('--git-commit', dest='git_commit', default=None, help='Force git commit hash')
parser.add_option('--git-describe', dest='git_describe', default=None, help='Force git describe')
parser.add_option('--git-branch', dest='git_branch', default=None, help='Force git branch name')
parser.add_option('--create-spdx', dest='create_spdx', action='store_true', default=False, help='Create SPDX license file')
parser.add_option('--rom-support', dest='rom_support', action='store_true', help=optparse.SUPPRESS_HELP)
parser.add_option('--rom-auto-lightfunc', dest='rom_auto_lightfunc', action='store_true', default=False, help=optparse.SUPPRESS_HELP)
parser.add_option('--user-builtin-metadata', dest='user_builtin_metadata', action='append', default=[], help=optparse.SUPPRESS_HELP)
parser.add_option('--quiet', dest='quiet', action='store_true', default=False, help='Suppress info messages (show warnings)')
parser.add_option('--verbose', dest='verbose', action='store_true', default=False, help='Show verbose debug messages')
(opts, args) = parser.parse_args()
return opts, args
# Python module check and friendly errors.
def check_python_modules(opts):
# dist.py doesn't need yaml but other dist utils will; check for it and
# warn if it is missing.
failed = False
def _warning(module, aptPackage, pipPackage):
sys.stderr.write('\n')
sys.stderr.write('*** NOTE: Could not "import %s" needed for dist. Install it using e.g.:\n' % module)
sys.stderr.write('\n')
sys.stderr.write(' # Linux\n')
sys.stderr.write(' $ sudo apt-get install %s\n' % aptPackage)
sys.stderr.write('\n')
sys.stderr.write(' # Windows\n')
sys.stderr.write(' > pip install %s\n' % pipPackage)
try:
import yaml
except ImportError:
_warning('yaml', 'python-yaml', 'PyYAML')
failed = True
try:
if opts.create_spdx:
import rdflib
except:
# Tolerate missing rdflib, just warn about it.
_warning('rdflib', 'python-rdflib', 'rdflib')
#failed = True
if failed:
sys.stderr.write('\n')
raise Exception('Missing some required Python modules')
def main():
# Basic option parsing, Python module check, CWD check.
opts, args = parse_options()
# Log level.
forward_loglevel = []
if opts.quiet:
logger.setLevel(logging.WARNING)
forward_loglevel = [ '--quiet' ]
elif opts.verbose:
logger.setLevel(logging.DEBUG)
forward_loglevel = [ '--verbose' ]
check_python_modules(opts)
if opts.repo_directory is None:
opts.repo_directory = os.path.abspath('.')
logger.info('No --repo-directory option, defaulting to current directory %s' % opts.repo_directory)
check_cwd_duktape_repo_root()
opts.repo_directory = os.path.abspath(opts.repo_directory)
logger.debug('Using repo directory: %s' % opts.repo_directory)
if opts.output_directory is None:
opts.output_directory = os.path.abspath(os.path.join(opts.repo_directory, 'dist'))
logger.info('No --output-directory option, defaulting to repo/dist directory %s' % opts.output_directory)
opts.output_directory = os.path.abspath(opts.output_directory)
logger.debug('Using output directory: %s' % opts.output_directory)
# Obsolete options check.
if opts.rom_support or opts.rom_auto_lightfunc:
raise Exception('obsolete ROM support argument(s), use tools/configure.py instead')
if len(opts.user_builtin_metadata) > 0:
raise Exception('obsolete --user-builtin-metadata argument, use tools/configure.py and --builtin-file instead')
# Figure out directories, git info, Duktape version, etc.
entry_cwd = os.getcwd()
dist = opts.output_directory
os.chdir(opts.repo_directory)
duk_version, duk_major, duk_minor, duk_patch, duk_version_formatted = get_duk_version()
if opts.git_commit is not None:
git_commit = opts.git_commit
else:
git_commit = exec_get_stdout([ 'git', 'rev-parse', 'HEAD' ], default='external').strip()
if opts.git_describe is not None:
git_describe = opts.git_describe
else:
git_describe = exec_get_stdout([ 'git', 'describe', '--always', '--dirty' ], default='external').strip()
if opts.git_branch is not None:
git_branch = opts.git_branch
else:
git_branch = exec_get_stdout([ 'git', 'rev-parse', '--abbrev-ref', 'HEAD' ], default='external').strip()
git_commit_cstring = cstring(git_commit)
git_describe_cstring = cstring(git_describe)
git_branch_cstring = cstring(git_branch)
logger.info('Dist for Duktape version %s, commit %s, describe %s, branch %s' % \
(duk_version_formatted, git_commit, git_describe, git_branch))
# Create dist directory structure, copy files.
logger.debug('Create dist directories and copy static files')
os.chdir(opts.repo_directory)
create_dist_directories(dist)
os.chdir(opts.repo_directory)
copy_files([
'builtins.yaml',
'duk_alloc_default.c',
'duk_api_buffer.c',
'duk_api_bytecode.c',
'duk_api_call.c',
'duk_api_codec.c',
'duk_api_compile.c',
'duk_api_debug.c',
'duk_api_heap.c',
'duk_api_internal.h',
'duk_api_inspect.c',
'duk_api_memory.c',
'duk_api_object.c',
'duk_api_public.h.in',
'duk_api_stack.c',
'duk_api_string.c',
'duk_api_time.c',
'duk_bi_array.c',
'duk_bi_boolean.c',
'duk_bi_buffer.c',
'duk_bi_date.c',
'duk_bi_date_unix.c',
'duk_bi_date_windows.c',
'duk_bi_duktape.c',
'duk_bi_encoding.c',
'duk_bi_error.c',
'duk_bi_function.c',
'duk_bi_global.c',
'duk_bi_json.c',
'duk_bi_math.c',
'duk_bi_number.c',
'duk_bi_object.c',
'duk_bi_pointer.c',
'duk_bi_protos.h',
'duk_bi_proxy.c',
'duk_bi_reflect.c',
'duk_bi_regexp.c',
'duk_bi_string.c',
'duk_bi_symbol.c',
'duk_bi_thread.c',
'duk_bi_thrower.c',
'duk_dblunion.h.in',
'duk_debug_fixedbuffer.c',
'duk_debugger.c',
'duk_debugger.h',
'duk_debug.h',
'duk_debug_macros.c',
'duk_debug_vsnprintf.c',
'duk_error_augment.c',
'duk_error.h',
'duk_error_longjmp.c',
'duk_error_macros.c',
'duk_error_misc.c',
'duk_error_throw.c',
'duk_exception.h',
'duk_forwdecl.h',
'duk_harray.h',
'duk_hbuffer_alloc.c',
'duk_hbuffer.h',
'duk_hbuffer_ops.c',
'duk_hbufobj.h',
'duk_hbufobj_misc.c',
'duk_hcompfunc.h',
'duk_heap_alloc.c',
'duk_heap.h',
'duk_heap_hashstring.c',
'duk_heaphdr.h',
'duk_heap_markandsweep.c',
'duk_heap_memory.c',
'duk_heap_misc.c',
'duk_heap_refcount.c',
'duk_heap_stringcache.c',
'duk_heap_stringtable.c',
'duk_hnatfunc.h',
'duk_hobject_alloc.c',
'duk_hobject_class.c',
'duk_hobject_enum.c',
'duk_hobject_finalizer.c',
'duk_hobject.h',
'duk_hobject_misc.c',
'duk_hobject_pc2line.c',
'duk_hobject_props.c',
'duk_hstring.h',
'duk_hstring_misc.c',
'duk_hthread_alloc.c',
'duk_hthread_builtins.c',
'duk_hthread.h',
'duk_hthread_misc.c',
'duk_hthread_stacks.c',
'duk_internal.h',
'duk_jmpbuf.h',
'duk_js_arith.c',
'duk_js_bytecode.h',
'duk_js_call.c',
'duk_js_compiler.c',
'duk_js_compiler.h',
'duk_js_executor.c',
'duk_js.h',
'duk_json.h',
'duk_js_ops.c',
'duk_js_var.c',
'duk_lexer.c',
'duk_lexer.h',
'duk_numconv.c',
'duk_numconv.h',
'duk_regexp_compiler.c',
'duk_regexp_executor.c',
'duk_regexp.h',
'duk_replacements.c',
'duk_replacements.h',
'duk_selftest.c',
'duk_selftest.h',
'duk_strings.h',
'duktape.h.in',
'duk_tval.c',
'duk_tval.h',
'duk_unicode.h',
'duk_unicode_support.c',
'duk_unicode_tables.c',
'duk_util_bitdecoder.c',
'duk_util_bitencoder.c',
'duk_util_bufwriter.c',
'duk_util.h',
'duk_util_hashbytes.c',
'duk_util_misc.c',
'duk_util_tinyrandom.c',
'strings.yaml',
'SpecialCasing.txt',
'SpecialCasing-8bit.txt',
'UnicodeData.txt',
'UnicodeData-8bit.txt',
], 'src-input', os.path.join(dist, 'src-input'))
for fn in [
'tags.yaml',
'platforms.yaml',
'architectures.yaml',
'compilers.yaml',
'platforms',
'architectures',
'compilers',
'feature-options',
'config-options',
'helper-snippets',
'header-snippets',
'other-defines',
'examples'
]:
# Copy directories in their entirety
if os.path.isfile(os.path.join('config', fn)):
shutil.copyfile(os.path.join('config', fn), os.path.join(dist, 'config', fn))
else:
shutil.copytree(os.path.join('config', fn), os.path.join(dist, 'config', fn))
copy_files([
'configure.py',
'combine_src.py',
'create_spdx_license.py',
'duk_meta_to_strarray.py',
'dukutil.py',
'dump_bytecode.py',
'extract_caseconv.py',
'extract_chars.py',
'extract_unique_options.py',
'genbuiltins.py',
'genconfig.py',
'json2yaml.py',
'merge_debug_meta.py',
'prepare_unicode_data.py',
'resolve_combined_lineno.py',
'scan_strings.py',
'scan_used_stridx_bidx.py',
'yaml2json.py',
], 'tools', os.path.join(dist, 'tools'))
copy_files([
'README.rst'
], 'config', os.path.join(dist, 'config'))
copy_files([
'README.rst',
'Makefile',
'package.json',
'duk_debug.js',
'duk_debug_proxy.js',
'duk_classnames.yaml',
'duk_debugcommands.yaml',
'duk_debugerrors.yaml',
'duk_opcodes.yaml'
], 'debugger', os.path.join(dist, 'debugger'))
copy_files([
'index.html',
'style.css',
'webui.js'
], os.path.join('debugger', 'static'), os.path.join(dist, 'debugger', 'static'))
copy_files([
'console-minimal.js',
'global.js',
'object-prototype-definegetter.js',
'object-prototype-definesetter.js',
'object-assign.js',
'performance-now.js',
'duktape-isfastint.js',
'duktape-error-setter-writable.js',
'duktape-error-setter-nonwritable.js',
'duktape-buffer.js'
], 'polyfills', os.path.join(dist, 'polyfills'))
copy_files([
'README.rst'
], 'examples', os.path.join(dist, 'examples'))
copy_files([
'README.rst',
'duk_cmdline.c',
'duk_cmdline_ajduk.c'
], os.path.join('examples', 'cmdline'), os.path.join(dist, 'examples', 'cmdline'))
copy_files([
'README.rst',
'c_eventloop.c',
'c_eventloop.js',
'ecma_eventloop.js',
'main.c',
'poll.c',
'ncurses.c',
'socket.c',
'fileio.c',
'curses-timers.js',
'basic-test.js',
'server-socket-test.js',
'client-socket-test.js'
], os.path.join('examples', 'eventloop'), os.path.join(dist, 'examples', 'eventloop'))
copy_files([
'README.rst',
'hello.c'
], os.path.join('examples', 'hello'), os.path.join(dist, 'examples', 'hello'))
copy_files([
'README.rst',
'eval.c'
], os.path.join('examples', 'eval'), os.path.join(dist, 'examples', 'eval'))
copy_files([
'README.rst',
'fib.js',
'process.js',
'processlines.c',
'prime.js',
'primecheck.c',
'uppercase.c'
], os.path.join('examples', 'guide'), os.path.join(dist, 'examples', 'guide'))
copy_files([
'README.rst',
'globals.coffee',
'hello.coffee',
'mandel.coffee'
], os.path.join('examples', 'coffee'), os.path.join(dist, 'examples', 'coffee'))
copy_files([
'README.rst',
'jxpretty.c'
], os.path.join('examples', 'jxpretty'), os.path.join(dist, 'examples', 'jxpretty'))
copy_files([
'README.rst',
'sandbox.c'
], os.path.join('examples', 'sandbox'), os.path.join(dist, 'examples', 'sandbox'))
copy_files([
'README.rst',
'duk_alloc_logging.c',
'duk_alloc_logging.h',
'log2gnuplot.py'
], os.path.join('examples', 'alloc-logging'), os.path.join(dist, 'examples', 'alloc-logging'))
copy_files([
'README.rst',
'duk_alloc_torture.c',
'duk_alloc_torture.h'
], os.path.join('examples', 'alloc-torture'), os.path.join(dist, 'examples', 'alloc-torture'))
copy_files([
'README.rst',
'duk_alloc_hybrid.c',
'duk_alloc_hybrid.h'
], os.path.join('examples', 'alloc-hybrid'), os.path.join(dist, 'examples', 'alloc-hybrid'))
copy_files([
'README.rst',
'duk_trans_socket_unix.c',
'duk_trans_socket_windows.c',
'duk_trans_socket.h'
], os.path.join('examples', 'debug-trans-socket'), os.path.join(dist, 'examples', 'debug-trans-socket'))
copy_files([
'README.rst',
'duk_trans_dvalue.c',
'duk_trans_dvalue.h',
'test.c',
'Makefile'
], os.path.join('examples', 'debug-trans-dvalue'), os.path.join(dist, 'examples', 'debug-trans-dvalue'))
copy_files([
'README.rst',
'duk_codepage_conv.c',
'duk_codepage_conv.h',
'test.c'
], os.path.join('examples', 'codepage-conv'), os.path.join(dist, 'examples', 'codepage-conv'))
copy_files([
'README.rst',
'dummy_date_provider.c'
], os.path.join('examples', 'dummy-date-provider'), os.path.join(dist, 'examples', 'dummy-date-provider'))
copy_files([
'README.rst',
'cpp_exceptions.cpp'
], os.path.join('examples', 'cpp-exceptions'), os.path.join(dist, 'examples', 'cpp-exceptions'))
copy_files([
'README.rst'
], 'extras', os.path.join(dist, 'extras'))
copy_files([
'README.rst',
'duk_logging.c',
'duk_logging.h',
'test.c',
'Makefile'
], os.path.join('extras', 'logging'), os.path.join(dist, 'extras', 'logging'))
copy_files([
'README.rst',
'duk_v1_compat.c',
'duk_v1_compat.h',
'test.c',
'Makefile',
'test_eval1.js',
'test_eval2.js',
'test_compile1.js',
'test_compile2.js'
], os.path.join('extras', 'duk-v1-compat'), os.path.join(dist, 'extras', 'duk-v1-compat'))
copy_files([
'README.rst',
'duk_print_alert.c',
'duk_print_alert.h',
'test.c',
'Makefile'
], os.path.join('extras', 'print-alert'), os.path.join(dist, 'extras', 'print-alert'))
copy_files([
'README.rst',
'duk_console.c',
'duk_console.h',
'test.c',
'Makefile'
], os.path.join('extras', 'console'), os.path.join(dist, 'extras', 'console'))
copy_files([
'README.rst',
'duk_minimal_printf.c',
'duk_minimal_printf.h',
'Makefile',
'test.c'
], os.path.join('extras', 'minimal-printf'), os.path.join(dist, 'extras', 'minimal-printf'))
copy_files([
'README.rst',
'duk_module_duktape.c',
'duk_module_duktape.h',
'Makefile',
'test.c'
], os.path.join('extras', 'module-duktape'), os.path.join(dist, 'extras', 'module-duktape'))
copy_files([
'README.rst',
'duk_module_node.c',
'duk_module_node.h',
'Makefile',
'test.c'
], os.path.join('extras', 'module-node'), os.path.join(dist, 'extras', 'module-node'))
copy_files([
'README.rst',
'duk_alloc_pool.c',
'duk_alloc_pool.h',
'ptrcomp.yaml',
'ptrcomp_fixup.h',
'Makefile',
'test.c'
], os.path.join('extras', 'alloc-pool'), os.path.join(dist, 'extras', 'alloc-pool'))
copy_files([
'Makefile.cmdline',
'Makefile.dukdebug',
'Makefile.eventloop',
'Makefile.hello',
'Makefile.eval',
'Makefile.coffee',
'Makefile.jxpretty',
'Makefile.sandbox',
'Makefile.codepage',
'mandel.js'
], 'dist-files', dist)
copy_and_replace(os.path.join('dist-files', 'Makefile.sharedlibrary'), os.path.join(dist, 'Makefile.sharedlibrary'), {
'@DUK_VERSION@': str(duk_version),
'@SONAME_VERSION@': str(int(duk_version / 100)) # 10500 -> 105
})
copy_and_replace(os.path.join('dist-files', 'README.rst'), os.path.join(dist, 'README.rst'), {
'@DUK_VERSION_FORMATTED@': duk_version_formatted,
'@GIT_COMMIT@': git_commit,
'@GIT_DESCRIBE@': git_describe,
'@GIT_BRANCH@': git_branch
})
copy_files([
'LICENSE.txt', # not strict RST so keep .txt suffix
'AUTHORS.rst'
], '.', os.path.join(dist))
# RELEASES.rst is only updated in master. It's not included in the dist to
# make maintenance fixes easier to make.
copy_files([
'murmurhash2.txt',
'lua.txt',
'commonjs.txt',
'xoroshiro128plus.txt',
'splitmix64.txt'
], 'licenses', os.path.join(dist, 'licenses'))
# Merge debugger metadata.
merged = exec_print_stdout([
sys.executable, os.path.join('tools', 'merge_debug_meta.py'),
'--output', os.path.join(dist, 'debugger', 'duk_debug_meta.json'),
'--class-names', os.path.join('debugger', 'duk_classnames.yaml'),
'--debug-commands', os.path.join('debugger', 'duk_debugcommands.yaml'),
'--debug-errors', os.path.join('debugger', 'duk_debugerrors.yaml'),
'--opcodes', os.path.join('debugger', 'duk_opcodes.yaml')
] + forward_loglevel)
# Add a build metadata file.
doc = {
'type': 'duk_dist_meta',
'comment': 'Metadata for Duktape distributable',
'git_commit': git_commit,
'git_branch': git_branch,
'git_describe': git_describe,
'duk_version': duk_version,
'duk_version_string': duk_version_formatted
}
with open(os.path.join(dist, 'duk_dist_meta.json'), 'wb') as f:
f.write(json.dumps(doc, indent=4))
# Build prepared sources (src/, src-noline/, src-separate/) with default
# config. This is done using tools and metadata in the dist directory.
logger.debug('Create prepared sources for default configuration')
def prep_default_sources(dirname, extraopts):
cmd = [
sys.executable, os.path.join(dist, 'tools', 'configure.py'),
'--source-directory', os.path.join(dist, 'src-input'),
'--output-directory', os.path.join(dist, dirname),
'--config-metadata', os.path.join(dist, 'config'),
'--git-commit', git_commit, '--git-describe', git_describe, '--git-branch', git_branch,
'--omit-removed-config-options', '--omit-unused-config-options',
'--emit-config-sanity-check', '--emit-legacy-feature-check'
]
cmd += extraopts
if opts.rom_support:
cmd.append('--rom-support')
if opts.rom_auto_lightfunc:
cmd.append('--rom-auto-lightfunc')
for i in opts.user_builtin_metadata:
cmd.append('--user-builtin-metadata')
cmd.append(i)
cmd += forward_loglevel
exec_print_stdout(cmd)
prep_default_sources('src', [ '--line-directives' ])
prep_default_sources('src-noline', [])
prep_default_sources('src-separate', [ '--separate-sources' ])
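    # Illustrative note (not part of the original): the three trees differ only
    # in how configure.py is invoked -- 'src' is the combined source with #line
    # directives pointing back to src-input, 'src-noline' omits the #line
    # directives, and 'src-separate' keeps the individual .c/.h files.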
# Clean up remaining temp files.
delete_matching_files(dist, lambda x: x[-4:] == '.tmp')
delete_matching_files(os.path.join(dist, 'tools'), lambda x: x[-4:] == '.pyc')
# Create SPDX license once all other files are in place (and cleaned).
if opts.create_spdx:
logger.debug('Create SPDX license')
try:
exec_get_stdout([
sys.executable,
os.path.join('tools', 'create_spdx_license.py'),
os.path.join(dist, 'license.spdx')
])
except:
logger.warning('')
logger.warning('***')
logger.warning('*** WARNING: Failed to create SPDX license, this should not happen for an official release!')
logger.warning('***')
logger.warning('')
else:
logger.debug('Skip SPDX license creation')
logger.info('Dist finished successfully')
if __name__ == '__main__':
main()
| harold-b/duktape | util/dist.py | Python | mit | 28,083 |
# Adapted from https://github.com/karulis/pybluez/blob/master/examples/simple/asynchronous-inquiry.py
#!/usr/bin/python
import sqlite3 as sql
import bluetooth
class DeviceHunter( bluetooth.DeviceDiscoverer ):
def setDBDetails( self ):
database = sql.connect('database.sqlite')
cursor = database.cursor()
self.__database = database
self.__cursor = cursor
def pre_inquiry( self ):
self.done = False
def device_discovered( self, macAddr, device_class, rssi, devName ):
major_classes = ( "Miscellaneous",
"Computer",
"Phone",
"LAN/Network Access point",
"Audio/Video",
"Peripheral",
"Imaging" )
major_class = (device_class >> 8) & 0xf
if major_class < 7:
devClass = major_classes[major_class]
else:
devClass = "Uncategorized"
self.handleUnknown( macAddr, devName, devClass )
def inquiry_complete(self):
self.done = True
def handleUnknown( self, macAddr, devName="", devClass="Uncategorized" ):
try:
self.__cursor
self.__database
except AttributeError:
self.setDBDetails()
try:
self.__cursor.execute( "SELECT `fname`, `lname`, `devicename`, `devicetype` FROM `devices` WHERE `macaddr`=?;", ( macAddr, ) )
Device = self.__cursor.fetchone()
except sql.Error as Err:
print( "Something went wrong whilst checking a new device!" )
print( Err )
if( Device == None ):
try:
if( devName == None ):
self.__cursor.execute( "INSERT into `devices` ( `fname`, `lname`, `macaddr`, `devicetype` ) VALUES ( 'Someone', 'Unregistered', ?, ? );", ( macAddr, devClass, ) )
else:
self.__cursor.execute( "INSERT into `devices` ( `fname`, `lname`, `devicename`, `devicetype`, `macaddr` ) VALUES ( 'Someone', 'Unregistered', ?, ?, ? );", ( devName, devClass, macAddr, ) )
self.__database.commit()
except sql.Error as Err:
print( "There was an error inserting a new device to the system!" )
print( Err )
else:
if( not Device[ 2 ] == devName ):
self.__cursor.execute( "UPDATE `devices` SET `devicename`=? WHERE `macaddr`=?;", ( devName, macAddr, ) )
self.__database.commit()
if( not Device[ 3 ] == devClass ):
self.__cursor.execute( "UPDATE `devices` SET `devicetype`=? WHERE `macaddr`=?;", ( devClass, macAddr, ) )
self.__database.commit()
def hunt( self ):
self.find_devices( lookup_names = True )
while True:
self.process_event()
if( self.done ): self.find_devices( lookup_names = True )
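# Illustrative usage sketch (not part of the original): discovery loops forever
# and assumes a working Bluetooth adapter plus the 'database.sqlite' file with
# the 'devices' table used above.
if __name__ == '__main__':
    hunter = DeviceHunter()
    hunter.hunt()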
| KiloSierraCharlie/BlueTrack | deviceHunter.py | Python | gpl-3.0 | 2,985 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Simon Perkins
#
# This file is part of montblanc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import inspect
import os
from montblanc.logsetup import setup_logging, setup_test_logging
from montblanc.tests import test
__version__ = "0.7.0"
log = setup_logging()
# This solution for constants based on
# http://stackoverflow.com/a/2688086
# Create a property that throws when
# you try and set it
def constant(f):
def fset(self, value):
raise SyntaxError('Foolish Mortal! You would dare change a universal constant?')
def fget(self):
return f()
return property(fget, fset)
class MontblancConstants(object):
# The speed of light, in metres
@constant
def C():
return 299792458
# Create a constants object
constants = MontblancConstants()
def rime_solver_cfg(**kwargs):
"""
    Produces a SolverConfiguration object, which inherits from
    a plain Python dict and contains the options required
    to configure the RIME Solver.
Keyword arguments
-----------------
Any keyword arguments are inserted into the
returned dict.
Returns
-------
A SolverConfiguration object.
"""
from .configuration import (load_config, config_validator,
raise_validator_errors)
def _merge_copy(d1, d2):
return { k: _merge_copy(d1[k], d2[k]) if k in d1
and isinstance(d1[k], dict)
and isinstance(d2[k], dict)
else d2[k] for k in d2 }
try:
cfg_file = kwargs.pop('cfg_file')
except KeyError as e:
slvr_cfg = kwargs
else:
cfg = load_config(cfg_file)
slvr_cfg = _merge_copy(cfg, kwargs)
# Validate the configuration, raising any errors
validator = config_validator()
validator.validate(slvr_cfg)
raise_validator_errors(validator)
return validator.document
def rime_solver(slvr_cfg):
"""
rime_solver(slvr_cfg)
Returns a solver suitable for solving the RIME.
Parameters
----------
slvr_cfg : RimeSolverConfiguration
Solver Configuration.
Returns
-------
A solver
"""
import montblanc.factory
return montblanc.factory.rime_solver(slvr_cfg)
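# Illustrative usage sketch (not part of the original module); the keyword
# arguments accepted are whatever config_validator() allows, so this is only
# an outline:
#
#     slvr_cfg = montblanc.rime_solver_cfg(cfg_file='montblanc.cfg')
#     slvr = montblanc.rime_solver(slvr_cfg)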
| ska-sa/montblanc | montblanc/__init__.py | Python | gpl-2.0 | 2,958 |
"""
Terminal control.
"""
#-------------------------------------------------------------------------------
import fcntl
import os
import struct
import termios
#-------------------------------------------------------------------------------
def get_cgwinsz(fd):
"""
Attempts to get window size from termios `TIOCGWINSZ` ioctl.
@raise NotImplementedError
The ioctl is not available.
@raise IOError
The fd is not a TTY.
"""
try:
TIOCGWINSZ = termios.TIOCGWINSZ
except AttributeError:
raise NotImplementedError("no TIOCGWINSZ")
Winsize = struct.Struct("HHHH")
winsz = fcntl.ioctl(fd, TIOCGWINSZ, " " * Winsize.size)
height, width, _, _ = Winsize.unpack(winsz)
return height, width
def get_terminal_size(default=(80, 25)):
"""
Attempts to determine the terminal width and height.
Returns:
    1. The values from the environment variables 'COLUMNS' and 'LINES', if set
       and valid integers.
    2. Else, the size of the TTY attached to any one of stdin/stdout/stderr.
    3. Else, the default values.
"""
def try_env(name):
try:
val = int(os.environ[name])
except (KeyError, ValueError):
pass
else:
if val > 0:
return val
return None
width = try_env("COLUMNS")
height = try_env("LINES")
if width is None or height is None:
# Try each of the original stdin, stdout, stderr for attached TTY.
for fd in (0, 1, 2):
try:
winsz_height, winsz_width = get_cgwinsz(fd)
except (NotImplementedError, IOError):
pass
else:
if height is None:
height = winsz_height
if width is None:
width = winsz_width
break
if width is None:
width = default[0]
if height is None:
height = default[1]
return width, height
def get_terminal_width(default=80):
"""
Attempts to determine the terminal width.
@deprecated
Use `get_terminal_size()`.
"""
width, _ = get_terminal_size(default=(default, 25))
return width
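# Illustrative sketch (not part of the original module): report the detected
# terminal geometry, falling back to 80x25 when no TTY information is available.
if __name__ == '__main__':
    width, height = get_terminal_size()
    print("terminal size: {} columns x {} lines".format(width, height))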
| twosigma/ngrid | ngrid/terminal.py | Python | bsd-3-clause | 2,217 |
#
# Copyright 2008-2015 Universidad Complutense de Madrid
#
# This file is part of Numina
#
# Numina is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Numina is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Numina. If not, see <http://www.gnu.org/licenses/>.
#
from .recipes import BaseRecipe
from .recipes import BaseRecipeAutoQC
from .dataframe import DataFrame
from .pipeline import InstrumentDRP, Pipeline, InstrumentConfiguration
from .pipeline import ObservingMode
from .objimport import import_object
from .objimport import fully_qualified_name
from .pipelineload import drp_load
from .requirements import Parameter, Requirement
from .products import DataProductType, DataFrameType
from .oresult import ObservationResult
from numina.exceptions import RecipeError # Do not remove, part of the API
from numina.exceptions import ValidationError # Do not remove, part of the API
from .recipeinout import RecipeInput, define_requirements, define_input
from .recipeinout import RecipeResult, define_result
from .recipeinout import ErrorRecipeResult
from .dataholders import Product
from .oresult import obsres_from_dict
from .qc import QC
# FIXME: these two are deprecated
FrameDataProduct = DataFrameType
DataProduct = DataProductType
| Pica4x6/numina | numina/core/__init__.py | Python | gpl-3.0 | 1,688 |
from functools import partial, reduce
import operator
from django.contrib import messages
from django.contrib.admin import (register, TabularInline, StackedInline,
ModelAdmin, HORIZONTAL)
from django.contrib.admin.options import BaseModelAdmin
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.admin import SimpleListFilter
from django.contrib.gis.admin import OSMGeoAdmin
from django.db.models import Q, TextField
from django.forms.models import modelformset_factory
from django.shortcuts import redirect
from django.utils.html import format_html_join
from django.utils.translation import ugettext_lazy as _
from grappelli.forms import GrappelliSortableHiddenMixin
from reversion.admin import VersionAdmin
from super_inlines.admin import SuperInlineModelAdmin, SuperModelAdmin
from tinymce.widgets import TinyMCE
from common.utils.cache import is_user_locked, lock_user
from common.utils.file import FileAnalyzer
from .models import *
from .forms import (
OeuvreForm, SourceForm, IndividuForm, ElementDeProgrammeForm,
ElementDeDistributionForm, EnsembleForm, SaisonForm, PartieForm,
LieuAdminForm,
)
from .jobs import (
events_to_pdf as events_to_pdf_job, split_pdf as split_pdf_job,
)
from common.utils.export import launch_export
from typography.utils import replace
__all__ = ()
#
# Common
#
class CustomBaseModel(BaseModelAdmin):
    # FIXME: Use a custom AuthenticationBackend instead.
def check_user_ownership(self, request, obj, has_class_permission):
if not has_class_permission:
return False
user = request.user
if obj is not None and not user.is_superuser \
and obj.owner not in user.get_descendants(include_self=True):
return False
return True
def has_change_permission(self, request, obj=None):
has_class_permission = super(CustomBaseModel,
self).has_change_permission(request, obj)
return self.check_user_ownership(request, obj, has_class_permission)
def has_delete_permission(self, request, obj=None):
        # FIXME: Because of a bug in
        # django.contrib.admin.actions.delete_selected, this action allows a
        # restricted user to delete objects for which they do not have
        # permission.
has_class_permission = super(CustomBaseModel,
self).has_delete_permission(request, obj)
return self.check_user_ownership(request, obj, has_class_permission)
def get_queryset(self, request):
user = request.user
qs = super(CustomBaseModel, self).get_queryset(request)
if not user.is_superuser and IS_POPUP_VAR not in request.GET:
qs = qs.filter(
owner__in=user.get_descendants(include_self=True))
return qs
# Common fieldsets
PERIODE_D_ACTIVITE_FIELDSET = (_('Période d’activité'), {
'fields': (('debut', 'debut_precision'), ('fin', 'fin_precision'))
})
#
# Filters
#
class HasRelatedObjectsListFilter(SimpleListFilter):
title = _('possède des objets liés')
parameter_name = 'has_related_objects'
def lookups(self, request, model_admin):
return (
('1', _('Oui')),
('0', _('Non')),
)
def queryset(self, request, queryset):
if self.value() == '1':
return queryset.with_related_objects()
if self.value() == '0':
return queryset.without_related_objects()
def build_boolean_list_filter(class_title, class_parameter_name, filter=None,
exclude=None):
class HasEventsListFilter(SimpleListFilter):
title = class_title
parameter_name = class_parameter_name
def lookups(self, request, model_admin):
return (
('1', _('Oui')),
('0', _('Non')),
)
def queryset(self, request, queryset):
if self.value() == '1':
query = getattr(queryset, 'filter' if filter is not None
else 'exclude')
return query(filter if filter is not None
else exclude).distinct()
if self.value() == '0':
query = getattr(queryset, 'filter' if exclude is not None
else 'exclude')
return query(exclude if exclude is not None
else filter).distinct()
return HasEventsListFilter
EventHasSourceListFilter = build_boolean_list_filter(_('source'), 'has_source',
exclude=Q(sources=None))
EventHasProgramListFilter = build_boolean_list_filter(
_('programme'), 'has_program',
Q(programme__isnull=False) | Q(relache=True))
SourceHasParentListFilter = build_boolean_list_filter(
_('a un parent'), 'has_parent', filter=Q(parent__isnull=False),
)
SourceHasEventsListFilter = build_boolean_list_filter(
_('événements'), 'has_events', exclude=Q(evenements=None))
SourceHasProgramListFilter = build_boolean_list_filter(
_('programme'), 'has_program',
Q(evenements__programme__isnull=False) | Q(evenements__relache=True))
#
# Inlines
#
class CustomTabularInline(TabularInline, CustomBaseModel):
extra = 0
exclude = ('owner',)
class CustomStackedInline(StackedInline, CustomBaseModel):
extra = 0
exclude = ('owner',)
class OeuvreMereInline(CustomTabularInline):
model = ParenteDOeuvres
verbose_name = model._meta.get_field('mere').verbose_name
verbose_name_plural = _('œuvres mères')
fk_name = 'fille'
raw_id_fields = ('mere',)
autocomplete_lookup_fields = {
'fk': ('mere',),
}
fields = ('type', 'mere')
classes = ('grp-collapse grp-closed',)
class PupitreInline(CustomTabularInline):
model = Pupitre
verbose_name = model._meta.verbose_name
verbose_name_plural = _('effectif')
raw_id_fields = ('partie',)
autocomplete_lookup_fields = {
'fk': ['partie'],
}
fields = ('partie', 'soliste', 'quantite_min', 'quantite_max',
'facultatif')
classes = ('grp-collapse grp-closed',)
class IndividuParentInline(CustomTabularInline):
model = ParenteDIndividus
verbose_name = model._meta.get_field('parent').verbose_name
verbose_name_plural = _('individus parents')
fk_name = 'enfant'
raw_id_fields = ('parent',)
autocomplete_lookup_fields = {
'fk': ('parent',),
}
fields = ('type', 'parent')
classes = ('grp-collapse grp-closed',)
class OeuvreLieesInline(StackedInline):
model = Oeuvre
classes = ('grp-collapse grp-closed',)
class AuteurInline(CustomTabularInline):
model = Auteur
raw_id_fields = ('individu', 'ensemble', 'profession')
autocomplete_lookup_fields = {
'fk': ['individu', 'ensemble', 'profession'],
}
fields = ('individu', 'ensemble', 'profession')
def get_formset(self, request, obj=None, **kwargs):
formset = super(AuteurInline,
self).get_formset(request, obj=obj, **kwargs)
if request.method == 'POST' or 'extrait_de' not in request.GET:
return formset
        # When an extract is being entered, the authors must already be
        # filled in; the user then only has to change them in the cases where
        # they do not match the parent work (for example an opera overture
        # whose librettist is not one of the authors).
extrait_de = Oeuvre.objects.get(pk=request.GET['extrait_de'])
initial = list(
extrait_de.auteurs.values('individu', 'ensemble', 'profession'))
class TmpFormset(formset):
extra = len(initial)
def __init__(self, *args, **kwargs):
kwargs['initial'] = initial
super(TmpFormset, self).__init__(*args, **kwargs)
return TmpFormset
class MembreInline(CustomStackedInline):
model = Membre
raw_id_fields = ('individu', 'instrument', 'profession')
autocomplete_lookup_fields = {
'fk': ['individu', 'instrument', 'profession'],
}
fieldsets = (
(None, {'fields': (
'individu', 'instrument', 'profession', 'classement',
)}),
PERIODE_D_ACTIVITE_FIELDSET,
)
class ElementDeDistributionInline(SuperInlineModelAdmin, CustomTabularInline):
model = ElementDeDistribution
form = ElementDeDistributionForm
verbose_name_plural = _('distribution')
raw_id_fields = ('individu', 'ensemble', 'partie', 'profession')
autocomplete_lookup_fields = {
'fk': ['individu', 'ensemble', 'partie', 'profession'],
}
fields = ('individu', 'ensemble', 'partie', 'profession')
classes = ('grp-collapse grp-open',)
def get_queryset(self, request):
qs = super(ElementDeDistributionInline, self).get_queryset(request)
return qs.select_related('individu', 'ensemble', 'partie', 'profession')
class ElementDeProgrammeInline(SuperInlineModelAdmin,
GrappelliSortableHiddenMixin,
CustomStackedInline):
model = ElementDeProgramme
form = ElementDeProgrammeForm
verbose_name_plural = _('programme')
fieldsets = (
(None, {
'fields': (('oeuvre', 'autre',), 'caracteristiques',
('numerotation', 'part_d_auteur'),
'position'),
}),
)
raw_id_fields = ('oeuvre', 'caracteristiques',)
autocomplete_lookup_fields = {
'fk': ('oeuvre',),
'm2m': ('caracteristiques',),
}
classes = ('grp-collapse grp-open',)
inlines = (ElementDeDistributionInline,)
def get_queryset(self, request):
qs = super(ElementDeProgrammeInline, self).get_queryset(request)
return qs.select_related('oeuvre').prefetch_related(
'caracteristiques', 'distribution',
'distribution__individu', 'distribution__ensemble',
'distribution__partie', 'distribution__profession')
class SourceEvenementInline(TabularInline):
model = SourceEvenement
verbose_name = _('événement lié')
verbose_name_plural = _('événements liés')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('evenement',)
related_lookup_fields = {
'fk': ('evenement',),
}
class SourceOeuvreInline(TabularInline):
model = SourceOeuvre
verbose_name = _('œuvre liée')
verbose_name_plural = _('œuvres liées')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('oeuvre',)
autocomplete_lookup_fields = {
'fk': ('oeuvre',),
}
class SourceIndividuInline(TabularInline):
model = SourceIndividu
verbose_name = _('individu lié')
verbose_name_plural = _('individus liés')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('individu',)
autocomplete_lookup_fields = {
'fk': ('individu',),
}
class SourceEnsembleInline(TabularInline):
model = SourceEnsemble
verbose_name = _('ensemble lié')
verbose_name_plural = _('ensembles liés')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('ensemble',)
autocomplete_lookup_fields = {
'fk': ('ensemble',),
}
class SourceLieuInline(TabularInline):
model = SourceLieu
verbose_name = _('lieu lié')
verbose_name_plural = _('lieux liés')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('lieu',)
autocomplete_lookup_fields = {
'fk': ('lieu',),
}
class SourcePartieInline(TabularInline):
model = SourcePartie
verbose_name = _('rôle ou instrument lié')
verbose_name_plural = _('rôles ou instruments liés')
classes = ('grp-collapse grp-closed',)
extra = 0
raw_id_fields = ('partie',)
autocomplete_lookup_fields = {
'fk': ('partie',),
}
#
# ModelAdmins
#
# FIXME: Workaround for https://code.djangoproject.com/ticket/26184
# Remove when fixed.
def lookup_needs_distinct(opts, lookup_path):
"""
Returns True if 'distinct()' should be used to query the given lookup path.
"""
field = None
# Go through the fields (following all relations) and look for an m2m
for lookup_part in lookup_path.split('__'):
if field is not None:
# Checks whether the current lookup part is not a field.
try:
if field.get_transform(lookup_part) is not None \
or field.get_lookup(lookup_part) is not None:
continue
except (NotImplementedError, TypeError):
continue
field = opts.get_field(lookup_part)
if hasattr(field, 'get_path_info'):
# This field is a relation, update opts to follow the relation
path_info = field.get_path_info()
opts = path_info[-1].to_opts
if any(path.m2m for path in path_info):
# This field is a m2m relation so we know we need to call distinct
return True
return False
class CommonAdmin(CustomBaseModel, ModelAdmin):
list_per_page = 20
save_as = True
additional_fields = ('owner',)
additional_readonly_fields = ('owner',)
admin_fields = ()
additional_list_display = ('owner',)
additional_list_editable = ()
additional_list_filters = ('owner', HasRelatedObjectsListFilter,)
fieldsets_and_inlines_order = ()
def __init__(self, *args, **kwargs):
self.readonly_fields += self.additional_readonly_fields
self.list_display += self.additional_list_display
self.list_filter += self.additional_list_filters
self.added_fieldsets = ()
super(CommonAdmin, self).__init__(*args, **kwargs)
def get_fieldsets(self, request, obj=None):
fieldsets = super(CommonAdmin, self).get_fieldsets(request, obj=obj)
        # If fields or fieldsets are defined, the form is generated
        # automatically and therefore already includes the fields we would
        # want to add below.
if self.fields or self.fieldsets:
added_fields = self._get_added_fields(
request, 'additional_fields', excluded=self.exclude or (),
)
if added_fields:
self.added_fieldsets = (
(_('Notes'), {
'classes': ('grp-collapse grp-closed',),
'fields': added_fields,
}),
)
return tuple(fieldsets) + self.added_fieldsets
def _get_added_fields(self, request, additional_fields_attname,
excluded=()):
if not request.user.is_superuser:
excluded += self.admin_fields
added_fields = []
for added_field in getattr(self, additional_fields_attname, ()):
if added_field not in excluded:
added_fields.append(added_field)
return tuple(added_fields)
def save_model(self, request, obj, form, change):
if hasattr(obj, 'owner') and obj.owner is None:
obj.owner = request.user
super(CommonAdmin, self).save_model(request, obj, form, change)
def save_formset(self, request, form, formset, change):
instances = formset.save(commit=False)
for instance in instances:
if hasattr(instance, 'owner') and instance.owner is None:
instance.owner = request.user
instance.save()
formset.save()
def get_list_editable(self, request, **kwargs):
added_editable_fields = self._get_added_fields(
request, 'additional_list_editable')
return tuple(self.list_editable) + added_editable_fields
def get_changelist_formset(self, request, **kwargs):
"""
        Modified version of the overridden method.
"""
defaults = {
'formfield_callback': partial(
self.formfield_for_dbfield, request=request),
}
defaults.update(kwargs)
list_editable = self.get_list_editable(request, **kwargs)
return modelformset_factory(
self.model, self.get_changelist_form(request), extra=0,
fields=list_editable, **defaults)
def get_changelist(self, request, **kwargs):
ChangeList = super(CommonAdmin, self).get_changelist(request, **kwargs)
list_editable = self.get_list_editable(request, **kwargs)
class NewChangeList(ChangeList):
def __init__(self, *args, **kwargs):
super(NewChangeList, self).__init__(*args, **kwargs)
if not self.is_popup:
self.list_editable = list_editable
return NewChangeList
def get_search_results(self, request, queryset, search_term):
search_term = replace(search_term)
# FIXME: What follows is a copy of the original get_search_results.
# It is a workaround to https://code.djangoproject.com/ticket/26184
# Remove when fixed.
def construct_search(field_name):
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
use_distinct = False
search_fields = self.get_search_fields(request)
if search_fields and search_term:
orm_lookups = [construct_search(str(search_field))
for search_field in search_fields]
for bit in search_term.split():
or_queries = [Q(**{orm_lookup: bit})
for orm_lookup in orm_lookups]
queryset = queryset.filter(reduce(operator.or_, or_queries))
if not use_distinct:
for search_spec in orm_lookups:
if lookup_needs_distinct(self.opts, search_spec):
use_distinct = True
break
return queryset, use_distinct
class PublishedAdmin(CommonAdmin):
additional_fields = ('etat', 'owner')
admin_fields = ('etat',)
additional_list_display = ('etat', 'owner')
additional_list_editable = ('etat',)
additional_list_filters = ('etat', 'owner', HasRelatedObjectsListFilter,)
class AutoriteAdmin(PublishedAdmin):
additional_fields = ('etat', 'notes_publiques', 'notes_privees', 'owner')
class TypeDeParenteCommonAdmin(CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'nom_relatif',
'nom_relatif_pluriel', 'classement',)
list_editable = ('nom', 'nom_pluriel', 'nom_relatif',
'nom_relatif_pluriel', 'classement',)
search_fields = ('nom__unaccent', 'nom_relatif__unaccent',
'nom_pluriel__unaccent', 'nom_relatif_pluriel__unaccent')
fieldsets = (
(None, {'fields': (
('nom', 'nom_pluriel'), ('nom_relatif', 'nom_relatif_pluriel'),
'classement',
)
}),
)
@register(TypeDeParenteDOeuvres)
class TypeDeParenteDOeuvresAdmin(VersionAdmin, TypeDeParenteCommonAdmin):
pass
@register(TypeDeParenteDIndividus)
class TypeDeParenteDIndividusAdmin(VersionAdmin, TypeDeParenteCommonAdmin):
pass
@register(Etat)
class EtatAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'public',
'has_related_objects')
list_editable = ('nom', 'nom_pluriel', 'public')
@register(NatureDeLieu)
class NatureDeLieuAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'referent',)
list_editable = ('nom', 'nom_pluriel', 'referent',)
list_filter = ('referent',)
search_fields = ('nom__unaccent', 'nom_pluriel__unaccent')
@register(Lieu)
class LieuAdmin(OSMGeoAdmin, AutoriteAdmin):
form = LieuAdminForm
list_display = ('__str__', 'nom', 'parent', 'nature', 'link',)
list_editable = ('nom', 'parent', 'nature',)
search_fields = ('nom__unaccent', 'parent__nom__unaccent',)
list_filter = ('nature',)
raw_id_fields = ('parent',)
autocomplete_lookup_fields = {
'fk': ['parent'],
}
readonly_fields = ('__str__', 'html', 'link',)
fieldsets = (
(None, {
'fields': (('nom', 'parent'), ('nature', 'is_institution'),
'historique', 'geometry', ('latitude', 'longitude')),
}),
)
layerswitcher = False
default_lon = 300000
default_lat = 5900000
default_zoom = 5
point_zoom = default_zoom
@register(Saison)
class SaisonAdmin(VersionAdmin, CommonAdmin):
form = SaisonForm
list_display = ('__str__', 'lieu', 'ensemble', 'debut', 'fin',
'evenements_count')
date_hierarchy = 'debut'
raw_id_fields = ('lieu', 'ensemble')
autocomplete_lookup_fields = {
'fk': ['lieu', 'ensemble'],
}
@register(Profession)
class ProfessionAdmin(VersionAdmin, AutoriteAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'nom_feminin',
'nom_feminin_pluriel', 'parent', 'classement')
list_editable = ('nom', 'nom_pluriel', 'nom_feminin',
'nom_feminin_pluriel', 'parent', 'classement')
search_fields = (
'nom__unaccent', 'nom_pluriel__unaccent',
'nom_feminin__unaccent', 'nom_feminin_pluriel__unaccent')
raw_id_fields = ('parent',)
autocomplete_lookup_fields = {
'fk': ('parent',),
}
fieldsets = (
(None, {
'fields': ('nom', 'nom_pluriel',
'nom_feminin', 'nom_feminin_pluriel',
'parent', 'classement'),
}),
)
@register(Individu)
class IndividuAdmin(VersionAdmin, AutoriteAdmin):
list_per_page = 20
list_display = ('__str__', 'nom', 'prenoms',
'pseudonyme', 'titre', 'naissance',
'deces', 'calc_professions', 'link',)
list_editable = ('nom', 'titre',)
search_fields = (
'nom__unaccent', 'pseudonyme__unaccent', 'nom_naissance__unaccent',
'prenoms__unaccent',)
list_filter = ('titre',)
form = IndividuForm
raw_id_fields = ('naissance_lieu', 'deces_lieu', 'professions')
autocomplete_lookup_fields = {
'fk': ('naissance_lieu', 'deces_lieu'),
'm2m': ('professions', 'parentes'),
}
readonly_fields = ('__str__', 'html', 'link',)
inlines = (IndividuParentInline,)
fieldsets = (
(None, {
'fields': (('titre', 'prenoms'), ('particule_nom', 'nom'),
'professions',),
}),
(_('Naissance'), {
'fields': (
('naissance_date', 'naissance_date_approx'),
('naissance_lieu', 'naissance_lieu_approx'))
}),
(_('Décès'), {
'fields': (
('deces_date', 'deces_date_approx'),
('deces_lieu', 'deces_lieu_approx'))
}),
(_('Informations complémentaires'), {
'classes': ('grp-collapse grp-closed',),
'fields': ('pseudonyme',
'prenoms_complets',
('particule_nom_naissance', 'nom_naissance'),
'designation', 'biographie', ('isni', 'sans_isni')),
}),
)
fieldsets_and_inlines_order = ('f', 'f', 'f', 'f', 'i', 'i')
def get_queryset(self, request):
qs = super(IndividuAdmin, self).get_queryset(request)
return qs.select_related(
'naissance_lieu', 'deces_lieu', 'etat', 'owner'
).prefetch_related('professions')
@register(TypeDEnsemble)
class TypeDEnsembleAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'parent')
list_editable = ('nom', 'nom_pluriel', 'parent')
search_fields = ('nom__unaccent', 'nom_pluriel__unaccent',)
raw_id_fields = ('parent',)
autocomplete_lookup_fields = {
'fk': ('parent',),
}
@register(Ensemble)
class EnsembleAdmin(VersionAdmin, AutoriteAdmin):
form = EnsembleForm
list_display = ('__str__', 'type', 'membres_count')
search_fields = ('nom__unaccent', 'membres__individu__nom__unaccent')
inlines = (MembreInline,)
raw_id_fields = ('siege', 'type')
autocomplete_lookup_fields = {
'fk': ('siege', 'type'),
}
fieldsets = (
(None, {
'fields': (('particule_nom', 'nom'), 'type', 'siege',
('isni', 'sans_isni')),
}),
PERIODE_D_ACTIVITE_FIELDSET,
)
fieldsets_and_inlines_order = ('f', 'f', 'i')
@register(GenreDOeuvre)
class GenreDOeuvreAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'has_related_objects')
list_editable = ('nom', 'nom_pluriel',)
search_fields = ('nom__unaccent', 'nom_pluriel__unaccent',)
raw_id_fields = ('parents',)
autocomplete_lookup_fields = {
'm2m': ('parents',),
}
@register(TypeDeCaracteristiqueDeProgramme)
class TypeDeCaracteristiqueDeProgrammeAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel', 'classement',)
list_editable = ('nom', 'nom_pluriel', 'classement',)
search_fields = ('nom__unaccent', 'nom_pluriel__unaccent')
@register(CaracteristiqueDeProgramme)
class CaracteristiqueDeProgrammeAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'type', 'valeur', 'classement',)
list_editable = ('valeur', 'classement',)
search_fields = ('type__nom__unaccent', 'valeur__unaccent')
@register(Partie)
class PartieAdmin(VersionAdmin, AutoriteAdmin):
form = PartieForm
list_display = (
'__str__', 'nom', 'parent', 'oeuvre', 'classement',
'premier_interprete',
)
list_editable = (
'nom', 'parent', 'oeuvre', 'classement', 'premier_interprete',
)
list_filter = ('type',)
list_select_related = ('parent', 'etat', 'owner')
search_fields = ('nom__unaccent',)
radio_fields = {'type': HORIZONTAL}
raw_id_fields = ('oeuvre', 'professions', 'parent', 'premier_interprete')
autocomplete_lookup_fields = {
'm2m': ('professions',),
'fk': ('oeuvre', 'parent', 'premier_interprete'),
}
fieldsets = (
(None, {
'fields': (
'type', ('nom', 'nom_pluriel'),
'oeuvre', 'professions', 'parent', 'classement',
'premier_interprete',
),
}),
)
@register(Oeuvre)
class OeuvreAdmin(VersionAdmin, AutoriteAdmin):
form = OeuvreForm
list_display = ('__str__', 'titre', 'titre_secondaire', 'genre',
'caracteristiques_html', 'auteurs_html',
'creation', 'link',)
search_fields = Oeuvre.autocomplete_search_fields(add_icontains=False)
list_filter = ('genre', 'tonalite', 'arrangement', 'type_extrait')
list_select_related = ('genre', 'etat', 'owner')
date_hierarchy = 'creation_date'
raw_id_fields = ('genre', 'extrait_de', 'creation_lieu')
autocomplete_lookup_fields = {
'fk': ('genre', 'extrait_de', 'creation_lieu'),
}
readonly_fields = ('__str__', 'html', 'link',)
inlines = (AuteurInline, PupitreInline, OeuvreMereInline)
fieldsets = (
(_('Titre significatif'), {
'fields': (('prefixe_titre', 'titre',), 'coordination',
('prefixe_titre_secondaire', 'titre_secondaire',),),
}),
(None, {
'fields': (('genre', 'numero'), ('coupe', 'indeterminee')),
}),
(_('Données musicales'), {
'fields': ('incipit', ('tempo', 'tonalite'),
('sujet', 'arrangement')),
}),
(None, {
'fields': (('surnom', 'nom_courant'),),
}),
(None, {
'fields': (('opus', 'ict'),),
}),
(None, {
'fields': ('extrait_de', ('type_extrait', 'numero_extrait')),
}),
(_('Création'), {
'fields': (
'creation_type',
('creation_date', 'creation_date_approx'),
('creation_heure', 'creation_heure_approx'),
('creation_lieu', 'creation_lieu_approx'))
}),
)
fieldsets_and_inlines_order = ('i', 'f', 'f', 'i', 'f', 'f', 'f', 'f', 'f')
def get_queryset(self, request):
qs = super(OeuvreAdmin, self).get_queryset(request)
return qs.select_related(
'genre', 'extrait_de', 'creation_lieu',
'etat', 'owner'
).prefetch_related(
'auteurs__individu', 'auteurs__ensemble', 'auteurs__profession',
'pupitres__partie'
)
MAX_EXPORTED_EVENTS = 200
def events_to_pdf(modeladmin, request, queryset):
    # Ensures the user is not trying to see something they should not.
queryset = queryset.published(request)
n = queryset.count()
if n > MAX_EXPORTED_EVENTS:
modeladmin.message_user(
request,
'Trop d’événements sélectionnés pour l’export ; '
'seuls les %s premiers seront exportés' % MAX_EXPORTED_EVENTS,
messages.WARNING)
queryset = queryset[:MAX_EXPORTED_EVENTS]
n = MAX_EXPORTED_EVENTS
launch_export(
events_to_pdf_job, request,
list(queryset.values_list('pk', flat=True)), 'PDF', 'de %s événements' % n)
events_to_pdf.short_description = _('Exporter en PDF')
@register(Evenement)
class EvenementAdmin(SuperModelAdmin, VersionAdmin, AutoriteAdmin):
list_display = ('__str__', 'relache', 'circonstance',
'has_source', 'has_program', 'link',)
list_editable = ('relache', 'circonstance',)
search_fields = ('circonstance__unaccent', 'debut_lieu__nom__unaccent')
list_filter = ('relache', EventHasSourceListFilter,
EventHasProgramListFilter)
list_select_related = ('debut_lieu', 'debut_lieu__nature',
'fin_lieu', 'fin_lieu__nature',
'etat', 'owner')
date_hierarchy = 'debut_date'
raw_id_fields = ('debut_lieu', 'fin_lieu', 'caracteristiques')
autocomplete_lookup_fields = {
'fk': ('debut_lieu', 'fin_lieu'),
'm2m': ('caracteristiques',),
}
readonly_fields = ('__str__', 'html', 'link')
inlines = (ElementDeDistributionInline, ElementDeProgrammeInline)
actions = [events_to_pdf]
fieldsets = (
(_('Début'), {
'fields': (
('debut_date', 'debut_date_approx'),
('debut_heure', 'debut_heure_approx'),
('debut_lieu', 'debut_lieu_approx'))
}),
(_('Fin'), {
'classes': ('grp-collapse grp-closed',),
'fields': (
('fin_date', 'fin_date_approx'),
('fin_heure', 'fin_heure_approx'),
('fin_lieu', 'fin_lieu_approx'))
}),
(None, {
'fields': (('circonstance', 'programme_incomplet', 'relache',),
'caracteristiques',),
}),
(_('Données économiques'), {
'classes': ('grp-collapse grp-closed',),
'fields': (('recette_generale', 'recette_par_billets'),),
}),
)
fieldsets_and_inlines_order = ('f', 'f', 'f', 'i', 'i')
def get_queryset(self, request):
qs = super(EvenementAdmin, self).get_queryset(request)
qs = qs.extra(select={
'_has_program':
'EXISTS (SELECT 1 FROM %s WHERE evenement_id = %s.id)'
% (ElementDeProgramme._meta.db_table, Evenement._meta.db_table),
'_has_source':
'EXISTS (SELECT 1 FROM %s WHERE evenement_id = %s.id)'
% (Source.evenements.field.m2m_db_table(),
Evenement._meta.db_table)})
return qs.select_related(
'debut_lieu', 'debut_lieu__nature',
'debut_lieu__parent', 'debut_lieu__parent__nature',
'etat', 'owner')
@register(TypeDeSource)
class TypeDeSourceAdmin(VersionAdmin, CommonAdmin):
list_display = ('__str__', 'nom', 'nom_pluriel',)
list_editable = ('nom', 'nom_pluriel',)
search_fields = ('nom__unaccent', 'nom_pluriel__unaccent')
def split_pdf(modeladmin, request, queryset):
    # Ensures the user is not trying to see something they should not.
queryset = queryset.published(request)
queryset = queryset.filter(
type_fichier=FileAnalyzer.OTHER, fichier__endswith='.pdf',
children__isnull=True,
)
if not queryset:
messages.warning(
request,
_('Aucune source sélectionnée n’est un PDF sans enfant.')
)
return
if is_user_locked(request.user):
messages.error(
request,
_('Une séparation de PDF de votre part est déjà en cours. '
'Veuillez attendre la fin de celle-ci avant '
'd’en lancer une autre.'))
return
lock_user(request.user)
for source in queryset:
split_pdf_job.delay(source.pk, request.user.pk)
messages.info(
request,
_('La séparation de PDF est en cours. '
'Revenez consulter les sources dans quelques minutes.'))
split_pdf.short_description = _('Séparer le PDF')
@register(Source)
class SourceAdmin(VersionAdmin, AutoriteAdmin):
form = SourceForm
list_display = (
'__str__', 'parent', 'position', 'date', 'type', 'has_events',
'has_program', 'link',
)
list_editable = ('parent', 'position', 'type', 'date')
list_select_related = ('type', 'etat', 'owner')
date_hierarchy = 'date'
search_fields = (
'type__nom__unaccent', 'titre__unaccent', 'date',
'date_approx__unaccent', 'numero__unaccent',
'lieu_conservation__unaccent', 'cote__unaccent')
list_filter = (SourceHasParentListFilter, 'type', 'titre',
SourceHasEventsListFilter, SourceHasProgramListFilter)
raw_id_fields = ('parent', 'evenements', 'editeurs_scientifiques')
autocomplete_lookup_fields = {
'fk': ('parent',),
'm2m': ('editeurs_scientifiques',),
}
related_lookup_fields = {
'm2m': ['evenements'],
}
readonly_fields = ('__str__', 'html', 'children_links')
inlines = (
AuteurInline, SourceIndividuInline, SourceOeuvreInline,
SourcePartieInline, SourceLieuInline, SourceEvenementInline,
SourceEnsembleInline,
)
actions = [split_pdf]
fieldsets = (
(None, {
'fields': (
('parent', 'position', 'est_promu'),
),
}),
(None, {
'fields': (
'type', 'titre', 'legende',
),
}),
(None, {
'fields': (
('date', 'date_approx'),
('numero', 'page', 'folio',),
('lieu_conservation', 'cote',),
'url',
)
}),
(_('Transcription'), {
'classes': ('grp-collapse grp-closed',),
'fields': ('transcription',),
}),
(None, {
'fields': (('fichier', 'telechargement_autorise'),),
}),
(None, {
'fields': ('children_links',),
}),
(_('Présentation'), {
'classes': ('grp-collapse grp-closed',),
'fields': (
'editeurs_scientifiques', 'date_publication', 'publications',
'developpements', 'presentation', 'contexte',
'sources_et_protocole', 'bibliographie',
),
})
)
fieldsets_and_inlines_order = ('f', 'f', 'f', 'f', 'f', 'i', 'i',
'i', 'i', 'i', 'i', 'i', 'f')
admin_fields = AutoriteAdmin.admin_fields + ('est_promue',)
formfield_overrides = {
TextField: {'widget': TinyMCE},
}
def get_queryset(self, request):
qs = super(SourceAdmin, self).get_queryset(request)
qs = qs.extra(
select={
'_has_events':
'EXISTS ('
' SELECT 1 FROM %(evenement)s '
' INNER JOIN %(m2m)s ON %(evenement)s.id '
' = %(m2m)s.evenement_id '
' WHERE %(m2m)s.source_id = %(source)s.id)' % {
'evenement': Evenement._meta.db_table,
'm2m': Source.evenements.field.m2m_db_table(),
'source': Source._meta.db_table,
},
'_has_program':
'EXISTS ('
' SELECT 1 FROM %(evenement)s '
' INNER JOIN %(m2m)s ON %(evenement)s.id '
' = %(m2m)s.evenement_id '
' WHERE (%(m2m)s.source_id = %(source)s.id '
' AND (%(evenement)s.relache = true '
' OR EXISTS (SELECT 1 FROM %(programme)s '
' WHERE %(programme)s.evenement_id '
' = %(evenement)s.id))))' % {
'evenement': Evenement._meta.db_table,
'm2m': Source.evenements.field.m2m_db_table(),
'source': Source._meta.db_table,
'programme': ElementDeProgramme._meta.db_table,
}
}
)
return qs
def change_view(self, request, object_id, form_url='', extra_context=None):
source = self.get_object(request, object_id)
if source is not None and isinstance(source.specific, (Video, Audio)):
change_url = source.get_change_url()
if change_url != request.path:
return redirect(change_url)
return super().change_view(
request, object_id, form_url=form_url, extra_context=extra_context,
)
def children_links(self, instance):
return format_html_join(
', ',
'<a href="{}">{}</a>',
[(child.get_change_url(), child.position)
for child in instance.children.order_by('position')]
)
children_links.short_description = _('Enfants')
@register(Audio)
class AudioAdmin(SourceAdmin):
readonly_fields = SourceAdmin.readonly_fields + (
'fichier_ogg', 'fichier_mpeg', 'extrait_ogg', 'extrait_mpeg',
'duree', 'duree_extrait',
)
fieldsets = (
SourceAdmin.fieldsets[0],
SourceAdmin.fieldsets[1],
SourceAdmin.fieldsets[2],
SourceAdmin.fieldsets[3],
(_('Fichiers'), {
'fields': (
('fichier', 'duree'),
('fichier_ogg', 'fichier_mpeg'),
('extrait', 'duree_extrait'),
('extrait_ogg', 'extrait_mpeg'),
),
}),
)
@register(Video)
class VideoAdmin(AudioAdmin):
readonly_fields = AudioAdmin.readonly_fields + (
'largeur', 'hauteur', 'largeur_extrait', 'hauteur_extrait',
)
fieldsets = (
SourceAdmin.fieldsets[0],
SourceAdmin.fieldsets[1],
SourceAdmin.fieldsets[2],
SourceAdmin.fieldsets[3],
(_('Fichiers'), {
'fields': (
('fichier', 'duree',
'largeur', 'hauteur'),
('fichier_ogg', 'fichier_mpeg'),
('extrait', 'duree_extrait',
'largeur_extrait', 'hauteur_extrait'),
('extrait_ogg', 'extrait_mpeg'),
),
}),
)
| dezede/dezede | libretto/admin.py | Python | bsd-3-clause | 39,854 |
# -*- coding: utf-8 -*-
"""
Thai Word-to-Phoneme (Thai W2P)
GitHub : https://github.com/wannaphong/Thai_W2P
"""
from typing import Union
import numpy as np
from pythainlp.corpus import download, get_corpus_path
_GRAPHEMES = list(
"พจใงต้ืฮแาฐฒฤๅูศฅถฺฎหคสุขเึดฟำฝยลอ็ม"
+ " ณิฑชฉซทรฏฬํัฃวก่ป์ผฆบี๊ธญฌษะไ๋นโภ?"
)
_PHONEMES = list(
"-พจใงต้ืฮแาฐฒฤูศฅถฺฎหคสุขเึดฟำฝยลอ็ม"
+ " ณิฑชฉซทรํฬฏ–ัฃวก่ปผ์ฆบี๊ธฌญะไษ๋นโภ?"
)
_MODEL_NAME = "thai_w2p"
class _Hparams:
batch_size = 256
enc_maxlen = 30 * 2
dec_maxlen = 40 * 2
num_epochs = 50 * 2
hidden_units = 64 * 8
emb_units = 64 * 4
graphemes = ["<pad>", "<unk>", "</s>"] + _GRAPHEMES
phonemes = ["<pad>", "<unk>", "<s>", "</s>"] + _PHONEMES
lr = 0.001
hp = _Hparams()
def _load_vocab():
g2idx = {g: idx for idx, g in enumerate(hp.graphemes)}
idx2g = {idx: g for idx, g in enumerate(hp.graphemes)}
p2idx = {p: idx for idx, p in enumerate(hp.phonemes)}
idx2p = {idx: p for idx, p in enumerate(hp.phonemes)}
# note that g and p mean grapheme and phoneme, respectively.
return g2idx, idx2g, p2idx, idx2p
class Thai_W2P(object):
def __init__(self):
super().__init__()
self.graphemes = hp.graphemes
self.phonemes = hp.phonemes
self.g2idx, self.idx2g, self.p2idx, self.idx2p = _load_vocab()
self.checkpoint = get_corpus_path(_MODEL_NAME)
if self.checkpoint is None:
download(_MODEL_NAME)
self.checkpoint = get_corpus_path(_MODEL_NAME)
self._load_variables()
def _load_variables(self):
self.variables = np.load(self.checkpoint, allow_pickle=True)
# (29, 64). (len(graphemes), emb)
self.enc_emb = self.variables.item().get("encoder.emb.weight")
# (3*128, 64)
self.enc_w_ih = self.variables.item().get("encoder.rnn.weight_ih_l0")
# (3*128, 128)
self.enc_w_hh = self.variables.item().get("encoder.rnn.weight_hh_l0")
# (3*128,)
self.enc_b_ih = self.variables.item().get("encoder.rnn.bias_ih_l0")
# (3*128,)
self.enc_b_hh = self.variables.item().get("encoder.rnn.bias_hh_l0")
# (74, 64). (len(phonemes), emb)
self.dec_emb = self.variables.item().get("decoder.emb.weight")
# (3*128, 64)
self.dec_w_ih = self.variables.item().get("decoder.rnn.weight_ih_l0")
# (3*128, 128)
self.dec_w_hh = self.variables.item().get("decoder.rnn.weight_hh_l0")
# (3*128,)
self.dec_b_ih = self.variables.item().get("decoder.rnn.bias_ih_l0")
# (3*128,)
self.dec_b_hh = self.variables.item().get("decoder.rnn.bias_hh_l0")
# (74, 128)
self.fc_w = self.variables.item().get("decoder.fc.weight")
# (74,)
self.fc_b = self.variables.item().get("decoder.fc.bias")
def _sigmoid(self, x):
return 1 / (1 + np.exp(-x))
    def _grucell(self, x, h, w_ih, w_hh, b_ih, b_hh):
        # One GRU step: project the input (ih) and the previous hidden
        # state (hh) onto the concatenated reset/update/new-gate space.
        rzn_ih = np.matmul(x, w_ih.T) + b_ih
        rzn_hh = np.matmul(h, w_hh.T) + b_hh
        # The first two thirds of each projection feed the reset/update
        # gates, the last third feeds the candidate activation.
        rz_ih, n_ih = (
            rzn_ih[:, : rzn_ih.shape[-1] * 2 // 3],
            rzn_ih[:, rzn_ih.shape[-1] * 2 // 3:],
        )
        rz_hh, n_hh = (
            rzn_hh[:, : rzn_hh.shape[-1] * 2 // 3],
            rzn_hh[:, rzn_hh.shape[-1] * 2 // 3:],
        )
        rz = self._sigmoid(rz_ih + rz_hh)
        r, z = np.split(rz, 2, -1)  # reset gate r, update gate z
        n = np.tanh(n_ih + r * n_hh)  # candidate hidden state
        h = (1 - z) * n + z * h  # blend candidate with previous state
        return h
def _gru(self, x, steps, w_ih, w_hh, b_ih, b_hh, h0=None) -> np.ndarray:
if h0 is None:
h0 = np.zeros((x.shape[0], w_hh.shape[1]), np.float32)
h = h0 # initial hidden state
outputs = np.zeros((x.shape[0], steps, w_hh.shape[1]), np.float32)
for t in range(steps):
h = self._grucell(x[:, t, :], h, w_ih, w_hh, b_ih, b_hh) # (b, h)
outputs[:, t, ::] = h
return outputs
def _encode(self, word: str) -> np.ndarray:
chars = list(word) + ["</s>"]
x = [self.g2idx.get(char, self.g2idx["<unk>"]) for char in chars]
x = np.take(self.enc_emb, np.expand_dims(x, 0), axis=0)
return x
def _short_word(self, word: str) -> Union[str, None]:
self.word = word
if self.word.endswith("."):
self.word = self.word.replace(".", "")
self.word = "-".join([i + "อ" for i in list(self.word)])
return self.word
return None
    def _predict(self, word: str) -> Union[str, list]:
short_word = self._short_word(word)
if short_word is not None:
return short_word
# encoder
enc = self._encode(word)
enc = self._gru(
enc,
len(word) + 1,
self.enc_w_ih,
self.enc_w_hh,
self.enc_b_ih,
self.enc_b_hh,
h0=np.zeros((1, self.enc_w_hh.shape[-1]), np.float32),
)
last_hidden = enc[:, -1, :]
# decoder
dec = np.take(self.dec_emb, [2], axis=0) # 2: <s>
h = last_hidden
preds = []
for _ in range(20):
h = self._grucell(
dec,
h,
self.dec_w_ih,
self.dec_w_hh,
self.dec_b_ih,
self.dec_b_hh,
) # (b, h)
logits = np.matmul(h, self.fc_w.T) + self.fc_b
pred = logits.argmax()
            if pred == 3:  # 3: </s> (end-of-sequence), stop decoding
break
preds.append(pred)
dec = np.take(self.dec_emb, [pred], axis=0)
preds = [self.idx2p.get(idx, "<unk>") for idx in preds]
return preds
def __call__(self, word: str) -> str:
if not any(letter in word for letter in self.graphemes):
pron = [word]
else: # predict for oov
pron = self._predict(word)
return "".join(pron)
_THAI_W2P = Thai_W2P()
def pronunciate(text: str) -> str:
"""
Convert a Thai word to its pronunciation in Thai letters.
Input should be one single word.
    :param str text: Thai text to be pronounced
:return: A string of Thai letters indicating
how the input text should be pronounced.
"""
global _THAI_W2P
return _THAI_W2P(text)
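# Example usage (a minimal sketch, not from the original module; the sample
# word and the word-segmentation advice are illustrative assumptions):
#
#     from pythainlp.transliterate.w2p import pronunciate
#     pron = pronunciate("ทดสอบ")  # returns a string of Thai letters
#
# ``pronunciate`` expects a single word, so longer text should be
# word-tokenized first and each token converted separately.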
| PyThaiNLP/pythainlp | pythainlp/transliterate/w2p.py | Python | apache-2.0 | 6,546 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import traceback
import bpy
from bpy.props import (
BoolProperty,
PointerProperty,
)
from bpy.types import (
PropertyGroup,
AddonPreferences,
)
bl_info = {
'name': 'UI Pie Menu Official',
'author': 'Antony Riakiotakis, Sebastian Koenig',
'version': (1, 1, 4),
'blender': (2, 7, 7),
'description': 'Individual Pie Menu Activation List',
'location': 'Addons Preferences',
'warning': '',
'wiki_url': 'https://wiki.blender.org/index.php/Extensions:2.6/Py/Scripts/3D_interaction/Pie_Menu',
'category': 'Pie Menu'
}
sub_modules_names = (
"pie_object_modes_of",
"pie_view_of",
"pie_shade_of",
"pie_manipulator_of",
"pie_pivot_of",
"pie_snap_of",
"pie_clip_marker_of",
)
sub_modules = [__import__(__package__ + "." + submod, {}, {}, submod) for submod in sub_modules_names]
sub_modules.sort(key=lambda mod: (mod.bl_info['category'], mod.bl_info['name']))
def _get_pref_class(mod):
import inspect
for obj in vars(mod).values():
if inspect.isclass(obj) and issubclass(obj, PropertyGroup):
if hasattr(obj, 'bl_idname') and obj.bl_idname == mod.__name__:
return obj
def get_addon_preferences(name=''):
"""Acquisition and registration"""
addons = bpy.context.user_preferences.addons
if __name__ not in addons: # wm.read_factory_settings()
return None
addon_prefs = addons[__name__].preferences
if name:
if not hasattr(addon_prefs, name):
for mod in sub_modules:
if mod.__name__.split('.')[-1] == name:
cls = _get_pref_class(mod)
if cls:
prop = PointerProperty(type=cls)
setattr(UIToolsPreferences, name, prop)
bpy.utils.unregister_class(UIToolsPreferences)
bpy.utils.register_class(UIToolsPreferences)
return getattr(addon_prefs, name, None)
else:
return addon_prefs
def register_submodule(mod):
mod.register()
mod.__addon_enabled__ = True
def unregister_submodule(mod):
if mod.__addon_enabled__:
mod.unregister()
mod.__addon_enabled__ = False
prefs = get_addon_preferences()
name = mod.__name__.split('.')[-1]
if hasattr(UIToolsPreferences, name):
delattr(UIToolsPreferences, name)
if prefs:
bpy.utils.unregister_class(UIToolsPreferences)
bpy.utils.register_class(UIToolsPreferences)
if name in prefs:
del prefs[name]
class UIToolsPreferences(AddonPreferences):
bl_idname = __name__
def draw(self, context):
layout = self.layout
for mod in sub_modules:
mod_name = mod.__name__.split('.')[-1]
info = mod.bl_info
column = layout.column()
box = column.box()
# first stage
expand = getattr(self, 'show_expanded_' + mod_name)
icon = 'TRIA_DOWN' if expand else 'TRIA_RIGHT'
col = box.column()
row = col.row()
sub = row.row()
sub.context_pointer_set('addon_prefs', self)
op = sub.operator('wm.context_toggle', text='', icon=icon,
emboss=False)
op.data_path = 'addon_prefs.show_expanded_' + mod_name
sub.label('{}: {}'.format(info['category'], info['name']))
sub = row.row()
sub.alignment = 'RIGHT'
if info.get('warning'):
sub.label('', icon='ERROR')
sub.prop(self, 'use_' + mod_name, text='')
# The second stage
if expand:
if info.get('description'):
split = col.row().split(percentage=0.15)
split.label('Description:')
split.label(info['description'])
if info.get('location'):
split = col.row().split(percentage=0.15)
split.label('Location:')
split.label(info['location'])
if info.get('author') and info.get('author') != 'chromoly':
split = col.row().split(percentage=0.15)
split.label('Author:')
split.label(info['author'])
if info.get('version'):
split = col.row().split(percentage=0.15)
split.label('Version:')
split.label('.'.join(str(x) for x in info['version']),
translate=False)
if info.get('warning'):
split = col.row().split(percentage=0.15)
split.label('Warning:')
split.label(' ' + info['warning'], icon='ERROR')
tot_row = int(bool(info.get('wiki_url')))
if tot_row:
split = col.row().split(percentage=0.15)
split.label(text='Internet:')
if info.get('wiki_url'):
op = split.operator('wm.url_open',
text='Documentation', icon='HELP')
op.url = info.get('wiki_url')
for i in range(4 - tot_row):
split.separator()
# Details and settings
if getattr(self, 'use_' + mod_name):
prefs = get_addon_preferences(mod_name)
if prefs and hasattr(prefs, 'draw'):
box = box.column()
prefs.layout = box
try:
prefs.draw(context)
except:
traceback.print_exc()
box.label(text='Error (see console)', icon='ERROR')
del prefs.layout
row = layout.row()
row.label("End of Pie Menu Activations")
for mod in sub_modules:
info = mod.bl_info
mod_name = mod.__name__.split('.')[-1]
def gen_update(mod):
def update(self, context):
enabled = getattr(self, 'use_' + mod.__name__.split('.')[-1])
if enabled:
register_submodule(mod)
else:
unregister_submodule(mod)
mod.__addon_enabled__ = enabled
return update
prop = BoolProperty(
name=info['name'],
description=info.get('description', ''),
update=gen_update(mod),
default=True,
)
setattr(UIToolsPreferences, 'use_' + mod_name, prop)
prop = BoolProperty()
setattr(UIToolsPreferences, 'show_expanded_' + mod_name, prop)
classes = (
UIToolsPreferences,
)
def register():
for cls in classes:
bpy.utils.register_class(cls)
prefs = get_addon_preferences()
for mod in sub_modules:
if not hasattr(mod, '__addon_enabled__'):
mod.__addon_enabled__ = False
name = mod.__name__.split('.')[-1]
if getattr(prefs, 'use_' + name):
register_submodule(mod)
def unregister():
for mod in sub_modules:
if mod.__addon_enabled__:
unregister_submodule(mod)
for cls in reversed(classes):
bpy.utils.unregister_class(cls)
if __name__ == "__main__":
register()
| Microvellum/Fluid-Designer | win64-vc/2.78/Python/bin/2.78/scripts/addons/pie_menus_official/__init__.py | Python | gpl-3.0 | 8,208 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ProjectCampaign.money_donated'
db.add_column(u'projects_projectcampaign', 'money_donated',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ProjectCampaign.money_donated'
db.delete_column(u'projects_projectcampaign', 'money_donated')
models = {
u'accounts.bluebottleuser': {
'Meta': {'object_name': 'BlueBottleUser'},
'about': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'}),
'availability': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
'available_time': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'birthdate': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'contribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'newsletter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'picture': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'primary_language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'share_money': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'share_time_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'user_type': ('django.db.models.fields.CharField', [], {'default': "'person'", 'max_length': '25'}),
'username': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'why': ('django.db.models.fields.TextField', [], {'max_length': '265', 'blank': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'geo.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'alpha2_code': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'alpha3_code': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'numeric_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'oda_recipient': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subregion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['geo.SubRegion']"})
},
u'geo.region': {
'Meta': {'ordering': "['name']", 'object_name': 'Region'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'numeric_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'})
},
u'geo.subregion': {
'Meta': {'ordering': "['name']", 'object_name': 'SubRegion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'numeric_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['geo.Region']"})
},
u'organizations.organization': {
'Meta': {'ordering': "['name']", 'object_name': 'Organization'},
'account_bank_address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'account_bank_country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['geo.Country']", 'null': 'True', 'blank': 'True'}),
'account_bank_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'account_bic': ('django_iban.fields.SWIFTBICField', [], {'max_length': '11', 'blank': 'True'}),
'account_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'account_iban': ('django_iban.fields.IBANField', [], {'max_length': '34', 'blank': 'True'}),
'account_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'account_other': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'deleted': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'legal_status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'partner_organizations': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'registration': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'skype': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
u'projects.partnerorganization': {
'Meta': {'object_name': 'PartnerOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'projects.project': {
'Meta': {'ordering': "['title']", 'object_name': 'Project'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'team_member'", 'null': 'True', 'to': u"orm['accounts.BlueBottleUser']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['accounts.BlueBottleUser']"}),
'partner_organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.PartnerOrganization']", 'null': 'True', 'blank': 'True'}),
'phase': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'projects.projectambassador': {
'Meta': {'object_name': 'ProjectAmbassador'},
'description': ('django.db.models.fields.TextField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'project_plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectPlan']"})
},
u'projects.projectbudgetline': {
'Meta': {'object_name': 'ProjectBudgetLine'},
'amount': ('django.db.models.fields.PositiveIntegerField', [], {}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'EUR'", 'max_length': '10'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project_plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectPlan']"})
},
u'projects.projectcampaign': {
'Meta': {'object_name': 'ProjectCampaign'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'EUR'", 'max_length': "'10'"}),
'deadline': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'money_asked': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'money_donated': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'project': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['projects.Project']", 'unique': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'})
},
u'projects.projectpitch': {
'Meta': {'object_name': 'ProjectPitch'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['geo.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '21', 'decimal_places': '18', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '21', 'decimal_places': '18', 'blank': 'True'}),
'need': ('django.db.models.fields.CharField', [], {'default': "'both'", 'max_length': '20', 'null': 'True'}),
'pitch': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['projects.Project']", 'unique': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'theme': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectTheme']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'video_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '100', 'blank': 'True'})
},
u'projects.projectplan': {
'Meta': {'object_name': 'ProjectPlan'},
'campaign': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['geo.Country']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'effects': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'for_who': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'future': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '255', 'blank': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '21', 'decimal_places': '18', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '21', 'decimal_places': '18', 'blank': 'True'}),
'money_needed': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'need': ('django.db.models.fields.CharField', [], {'default': "'both'", 'max_length': '20', 'null': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['organizations.Organization']", 'null': 'True', 'blank': 'True'}),
'pitch': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'project': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['projects.Project']", 'unique': 'True'}),
'reach': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'theme': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectTheme']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'video_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'projects.projectresult': {
'Meta': {'object_name': 'ProjectResult'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['projects.Project']", 'unique': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'projects.projecttheme': {
'Meta': {'ordering': "['name']", 'object_name': 'ProjectTheme'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.tag': {
'Meta': {'object_name': 'Tag'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
u'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_tagged_items'", 'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'taggit_taggeditem_items'", 'to': u"orm['taggit.Tag']"})
}
}
complete_apps = ['projects'] | gannetson/sportschooldeopenlucht | apps/projects/migrations/0004_auto__add_field_projectcampaign_money_donated.py | Python | bsd-3-clause | 20,314 |
from pyoptools.all import *
from time import time
from numpy import exp,pi,angle,ones
from numpy.random import random
from numpy.fft import fft2, ifft2, fftshift, ifftshift
def ffGS(z,target,estimate=None, iterations=20,error=None):
'''
Far field Gerchberg - Saxton Algorithm
    Calculates the phase distribution in an object plane (for a given
    amplitude constraint) to obtain a specific amplitude distribution in
    the target plane.
    It uses the Gerchberg - Saxton algorithm for Fraunhofer propagation.
    An FFT implementation of the Fraunhofer transform is used.
**ARGUMENTS:**
========== ======================================================
z Propagation distance. This is used to calculate the
resolution needed in the object plane, for a given
target resolution.
target :class:`Field` instance whose amplitude distribution
is used to represent the amplitude constrain to be
applied in the target plane. The phase of this field
is not used.
estimate :class:`Field` instance used as initial estimate for
the problem. The amplitude of this field is taken as
the reference amplitude and the phase is obtained. The
resolution used to define this field must match the
value needed to obtain the required target resolution
               when the FFT-Fraunhofer transform is used. If the
wrong value is given an exception is raised.
If not given, a unitary amplitude wave, with random
phase and the correct resolution, is used.
iterations Maximum number of iterations
error Expected error
========== ======================================================
    .. note:: target and object must have the same wavelength
**RETURN VALUE:**
(holo,err)
==== ==========================================================
holo Field instance, containing the reference amplitude
information and the phase obtained from the iterative
algorithm. The holo.res attribute contains the
resolution of the calculated hologram for the given
propagation distance. The holo.l attribute contains the
wavelenght used to calculate the hologram.
err Final error obtained
==== ==========================================================
'''
    if estimate is None:
edata=exp(2.j*pi*random(target.shape))
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
estimate=Field(data=edata,psize=(dxe,dye),l=target.l)
assert estimate.shape==target.shape,\
"The estimate field, and the target field, must have the same shape"
assert target.l==estimate.l,\
"The wave lenghts for the reference beam, and the target must be equal"
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
dx,dy=estimate.res
assert (dxe==dx) and (dye==dy),\
"The resolution for the reference beam, and the target must be equal"
holo=estimate
eabs=estimate.abs()
#Normalized Target amplitude
ntarget=target.abs()/target.abs().max()
for n in range(iterations):
if n!=0: holo=imp.propagate_fraunhofer(-z)
#Keep only the phase in the hologram plane
holo.data=exp(1.j*holo.angle)
holo=holo*eabs
#Calculate the new image plane
imp=holo.propagate_fraunhofer(z)
err=(ntarget-imp.abs()/imp.abs().max()).std()
if error!=None and err<error: break
d=exp(1.j*imp.angle)
imp=Field(data=d, psize=imp.psize, l=imp.l)
imp=imp*target.abs()
return holo,err
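# Example usage of ffGS (a minimal sketch, not part of the library; the array
# size, pixel pitch, wavelength and propagation distance below are arbitrary
# illustrative assumptions):
#
#     from numpy import zeros
#     tdata = zeros((256, 256))
#     tdata[120:136, 120:136] = 1.       # desired far-field amplitude
#     target = Field(data=tdata, psize=(10e-6, 10e-6), l=633e-9)
#     holo, err = ffGS(z=0.5, target=target, iterations=50, error=1e-3)
#     # holo.angle holds the computed phase mask, holo.res the hologram-plane
#     # resolution implied by z, and err the final normalized amplitude error.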
def fftGS(z,target,estimate=None, iterations=20,error=None,flagRand=True):
'''
Far field Gerchberg - Saxton Algorithm
    Calculates the phase distribution in an object plane (for a given
    amplitude constraint) to obtain a specific amplitude distribution in
    the target plane.
It uses the Gerchberg - Saxton algorithm for far-field propagation,
using a standard FFT.
**ARGUMENTS:**
========== ======================================================
z Propagation distance. This is used to calculate the
resolution needed in the object plane, for a given
target resolution.
target :class:`Field` instance whose amplitude distribution
is used to represent the amplitude constrain to be
applied in the target plane. The phase of this field
is not used.
estimate :class:`Field` instance used as initial estimate for
the problem. The amplitude of this field is taken as
the reference amplitude and the phase is obtained. The
resolution used to define this field must match the
value needed to obtain the required target resolution
               when the FFT-Fraunhofer transform is used. If the
wrong value is given an exception is raised.
If not given, a unitary amplitude wave, with random
phase and the correct resolution, is used.
iterations Maximum number of iterations
error Expected error
========== ======================================================
    .. note:: target and object must have the same wavelength
**RETURN VALUE:**
(holo,err)
==== ==========================================================
holo Field instance, containing the reference amplitude
information and the phase obtained from the iterative
algorithm. The holo.res attribute contains the
resolution of the calculated hologram for the given
propagation distance. The holo.l attribute contains the
         wavelength used to calculate the hologram.
err Final error obtained
==== ==========================================================
'''
    if estimate is None:
if flagRand:
edata=exp(2.j*pi*random(target.shape))
else:
edata=exp(2.j*pi*ones(target.shape))
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
estimate=Field(data=edata,psize=(dxe,dye),l=target.l)
assert estimate.shape==target.shape,\
"The estimate field, and the target field, must have the same shape"
assert target.l==estimate.l,\
"The wave lenghts for the reference beam, and the target must be equal"
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
dx,dy=estimate.res
assert (dxe==dx) and (dye==dy),\
"The resolution for the reference beam, and the target must be equal"
holo=estimate.data
eabs=estimate.abs()
#Normalized Target amplitude
ntarget=target.abs()/target.abs().max()
for n in range(iterations):
if n!=0: holo=fftshift(fft2(ifftshift(imp)))
#Keep only the phase in the hologram plane
holo=exp(1.j*angle(holo))
holo=holo*eabs
#Calculate the new image plane
imp=ifftshift(ifft2(fftshift(holo)))
err=(ntarget-abs(imp)/abs(imp).max()).std()
if error!=None and err<error: break
d=exp(1.j*angle(imp))
imp=d*target.abs()
holo=Field(data=holo, psize=(dxe,dye), l=target.l)
return holo,err
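# Example usage of fftGS (a sketch; it reuses the ``target`` field built in
# the ffGS example above, which is an assumption, not library documentation).
# fftGS works on plain numpy arrays with fft2/ifft2 instead of Field
# propagation, and flagRand=False starts from a flat instead of random phase:
#
#     holo, err = fftGS(z=0.5, target=target, iterations=100, flagRand=False)
#     phase_mask = holo.angle            # phase distribution of the hologram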
def frGS(z,target,estimate=None, iterations=20,error=None):
'''
Fresnel transform Gerchberg - Saxton Algorithm
    Calculates the phase distribution in an object plane (for a given
    amplitude constraint) to obtain a specific amplitude distribution in
    the target plane.
    An FFT implementation of the Fresnel transform is used.
**ARGUMENTS:**
========== ======================================================
z Propagation distance. This is used to calculate the
resolution needed in the object plane, for a given
target resolution.
target :class:`Field` instance whose amplitude distribution
is used to represent the amplitude constrain to be
applied in the target plane. The phase of this field
is not used.
estimate :class:`Field` instance used as initial estimate for
the problem. The amplitude of this field is taken as
the reference amplitude and the phase is obtained. The
resolution used to define this field must match the
value needed to obtain the required target resolution
when the FFT-Fresnel transform is used. If the
wrong value is given an exception is raised.
If not given, a unitary amplitude wave, with random
phase and the correct resolution, is used.
iterations Maximum number of iterations
error Expected error
========== ======================================================
    .. note:: target and object must have the same wavelength
**RETURN VALUE:**
(holo,err)
==== ===========================================================
holo Field instance, containing the reference amplitude
information and the phase obtained from the iterative
algorithm. The holo.res attribute contains the
resolution of the calculated hologram for the given
propagation distance. The holo.l attribute contains the
         wavelength used to calculate the hologram.
err Final error obtained
==== ===========================================================
'''
    if estimate is None:
edata=exp(2.j*pi*random(target.shape))
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
estimate=Field(data=edata,psize=(dxe,dye),l=target.l)
assert estimate.shape==target.shape,\
"The estimate field, and the target field, must have the same shape"
assert target.l==estimate.l,\
"The wave lenghts for the reference beam, and the target must be equal"
sx,sy=target.size
dxe=target.l*z/sx
dye=target.l*z/sy
dx,dy=estimate.res
assert (dxe==dx) and (dye==dy),\
"The resolution for the reference beam, and the target must be equal"
holo=estimate
eabs=estimate.abs()
#Normalized Target amplitude
ntarget=target.abs()/target.abs().max()
for n in range(iterations):
if n!=0: holo=imp.propagate_fresnel(-z)
#Keep only the phase in the hologram plane
holo.data=exp(1.j*holo.angle)
        #Apply the amplitude constraint
holo=holo*eabs
#Calculate the new image plane
imp=holo.propagate_fresnel(z)
err=(ntarget-imp.abs()/imp.abs().max()).std()
if error!=None and err<error: break
d=exp(1.j*imp.angle)
imp=Field(data=d, psize=imp.psize, l=imp.l)
        #Apply the amplitude constraint
imp=imp*target.abs()
return holo,err
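# Example usage of frGS (a sketch with the same illustrative ``target`` as
# above). It has the same signature as ffGS but propagates with the FFT
# Fresnel transform, which is the appropriate model when z is too short for
# the far-field (Fraunhofer) approximation:
#
#     holo, err = frGS(z=0.2, target=target, iterations=50, error=1e-3)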
def asGS(z,target,estimate=None, iterations=20,error=None):
'''
Angular spectrum Gerchberg - Saxton Algorithm
    Calculates the phase distribution in an object plane (for a given
    amplitude constraint) to obtain a specific amplitude distribution in
    the target plane.
It uses the Gerchberg - Saxton algorithm for the angular spectrum
propagation.
**ARGUMENTS:**
========== =====================================================
z Propagation distance. This is used to calculate the
resolution needed in the object plane, for a given
target resolution.
target :class:`Field` instance whose amplitude distribution
is used to represent the amplitude constrain to be
applied in the target plane. The phase of this field
is not used.
estimate :class:`Field` instance used as initial estimate for
the problem. The amplitude of this field is taken as
the reference amplitude and the phase is obtained. It
must have the same resolution as the target field.
If not given, a unitary amplitude wave, with random
phase and the correct resolution, is used.
iterations Maximum number of iterations
error Expected error
========== =====================================================
    .. note:: target and object must have the same wavelength
**RETURN VALUE:**
(holo,err)
==== ===========================================================
holo Field instance, containing the reference amplitude
information and the phase obtained from the iterative
algorithm. The holo.res attribute contains the
resolution of the calculated hologram for the given
propagation distance. The holo.l attribute contains the
         wavelength used to calculate the hologram.
err Final error obtained
==== ===========================================================
'''
    if estimate is None:
        edata=exp(2.j*pi*random(target.shape))
        # The angular spectrum propagator preserves the sampling, so the
        # default estimate must share the target's resolution (see the
        # assertion below), not the far-field resolution l*z/s.
        dxe,dye=target.res
        estimate=Field(data=edata,psize=(dxe,dye),l=target.l)
assert estimate.shape==target.shape,\
"The estimate field, and the target field, must have the same shape"
assert target.l==estimate.l,\
"The wave lenghts for the reference beam, and the target must be equal"
dxe,dye=target.res
dx,dy=estimate.res
assert (dxe==dx) and (dye==dy),\
"The resolution for the estimate beam, and the target must be equal"
holo=estimate
eabs=estimate.abs()
#Normalized Target amplitude
ntarget=target.abs()/target.abs().max()
for n in range(iterations):
if n!=0: holo=imp.propagate_ae(-z)
#Keep only the phase in the hologram plane
holo.data=exp(1.j*holo.angle)
        #Apply the amplitude constraint
        holo=holo*eabs
#Calculate the new image plane
imp=holo.propagate_ae(z)
err=(ntarget-imp.abs()/imp.abs().max()).std()
if error!=None and err<error: break
d=exp(1.j*imp.angle)
imp=Field(data=d, psize=imp.psize, l=imp.l)
        #Apply the amplitude constraint
        imp=imp*target.abs()
return holo,err
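# ---------------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# module). It relies only on names already used above (Field, asGS, exp, pi,
# random); the target pattern, pixel size, wavelength and propagation
# distance are invented example values.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from numpy import zeros
    tdata = zeros((256, 256))
    tdata[96:160, 96:160] = 1.  # bright square used as the target amplitude
    target = Field(data=tdata, psize=(10e-6, 10e-6), l=633e-9)
    # Random-phase estimate with the same shape, resolution and wavelength,
    # so the consistency asserts inside asGS are satisfied.
    est = Field(data=exp(2.j*pi*random((256, 256))),
                psize=(10e-6, 10e-6), l=633e-9)
    holo, err = asGS(1e-2, target, estimate=est, iterations=50, error=1e-3)
    print("asGS residual error: %g" % err)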
| wschoenell/pyoptools | pyoptools/wavefront/calc/gs.py | Python | bsd-3-clause | 13,134 |
from __future__ import print_function
from numpy import pi, arange, sin, cos
from bokeh.browserlib import view
from bokeh.document import Document
from bokeh.models.glyphs import Circle
from bokeh.models import (
Plot, DataRange1d, LinearAxis, Grid,
ColumnDataSource, PanTool, WheelZoomTool
)
from bokeh.client import push_session
document = Document()
session = push_session(document)
x = arange(-2*pi, 2*pi, 0.1)
y = sin(x)
r = (cos(x)+1) * 6 + 6
source = ColumnDataSource(data=dict(x=x, y=y, r=r))
xdr = DataRange1d()
ydr = DataRange1d()
plot = Plot(x_range=xdr, y_range=ydr, min_border=80)
circle = Circle(
x="x", y="y", size="r",
fill_color="red", line_color="black"
)
plot.add_glyph(source, circle)
xaxis = LinearAxis()
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis()
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
plot.add_tools(PanTool(), WheelZoomTool())
document.add_root(plot)
session.show(plot)
| maxalbert/bokeh | examples/glyphs/glyph2_server.py | Python | bsd-3-clause | 1,037 |
# Copyright (c) 2014 OpenStack Foundation, all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from neutron_lib import constants as p_const
from neutron_lib import context
from neutron_lib import exceptions as exc
from neutron_lib.plugins.ml2 import api
from oslo_config import cfg
from six import moves
import testtools
from testtools import matchers
from neutron.plugins.ml2.drivers import type_tunnel
TUNNEL_IP_ONE = "10.10.10.10"
TUNNEL_IP_TWO = "10.10.10.20"
TUNNEL_IPV6_ONE = "2001:db8:1::10"
HOST_ONE = 'fake_host_one'
HOST_TWO = 'fake_host_two'
TUN_MIN = 100
TUN_MAX = 109
TUNNEL_RANGES = [(TUN_MIN, TUN_MAX)]
UPDATED_TUNNEL_RANGES = [(TUN_MIN + 5, TUN_MAX + 5)]
class TunnelTypeTestMixin(object):
DRIVER_CLASS = None
TYPE = None
def setUp(self):
super(TunnelTypeTestMixin, self).setUp()
self.driver = self.DRIVER_CLASS()
self.driver.tunnel_ranges = TUNNEL_RANGES
self.driver.sync_allocations()
self.context = context.Context()
def test_tunnel_type(self):
self.assertEqual(self.TYPE, self.driver.get_type())
def test_validate_provider_segment(self):
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: 'phys_net',
api.SEGMENTATION_ID: None}
with testtools.ExpectedException(exc.InvalidInput):
self.driver.validate_provider_segment(segment)
segment[api.PHYSICAL_NETWORK] = None
self.driver.validate_provider_segment(segment)
segment[api.SEGMENTATION_ID] = 1
self.driver.validate_provider_segment(segment)
def test_sync_tunnel_allocations(self):
self.assertIsNone(
self.driver.get_allocation(self.context, (TUN_MIN - 1)))
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MIN)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MIN + 1)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MAX - 1)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MAX)).allocated)
self.assertIsNone(
self.driver.get_allocation(self.context, (TUN_MAX + 1)))
self.driver.tunnel_ranges = UPDATED_TUNNEL_RANGES
self.driver.sync_allocations()
self.assertIsNone(
self.driver.get_allocation(self.context, (TUN_MIN + 5 - 1)))
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MIN + 5)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context,
(TUN_MIN + 5 + 1)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context,
(TUN_MAX + 5 - 1)).allocated)
self.assertFalse(
self.driver.get_allocation(self.context, (TUN_MAX + 5)).allocated)
self.assertIsNone(
self.driver.get_allocation(self.context, (TUN_MAX + 5 + 1)))
def _test_sync_allocations_and_allocated(self, tunnel_id):
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: None,
api.SEGMENTATION_ID: tunnel_id}
self.driver.reserve_provider_segment(self.context, segment)
self.driver.tunnel_ranges = UPDATED_TUNNEL_RANGES
self.driver.sync_allocations()
self.assertTrue(
self.driver.get_allocation(self.context, tunnel_id).allocated)
def test_sync_allocations_and_allocated_in_initial_range(self):
self._test_sync_allocations_and_allocated(TUN_MIN + 2)
def test_sync_allocations_and_allocated_in_final_range(self):
self._test_sync_allocations_and_allocated(TUN_MAX + 2)
def test_sync_allocations_no_op(self):
def verify_no_chunk(iterable, chunk_size):
# no segment removed/added
self.assertEqual(0, len(list(iterable)))
return []
with mock.patch.object(
type_tunnel, 'chunks', side_effect=verify_no_chunk) as chunks:
self.driver.sync_allocations()
self.assertEqual(2, len(chunks.mock_calls))
def test_partial_segment_is_partial_segment(self):
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: None,
api.SEGMENTATION_ID: None}
self.assertTrue(self.driver.is_partial_segment(segment))
def test_specific_segment_is_not_partial_segment(self):
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: None,
api.SEGMENTATION_ID: 101}
self.assertFalse(self.driver.is_partial_segment(segment))
def test_reserve_provider_segment_full_specs(self):
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: None,
api.SEGMENTATION_ID: 101}
observed = self.driver.reserve_provider_segment(self.context, segment)
alloc = self.driver.get_allocation(self.context,
observed[api.SEGMENTATION_ID])
self.assertTrue(alloc.allocated)
with testtools.ExpectedException(exc.TunnelIdInUse):
self.driver.reserve_provider_segment(self.context, segment)
self.driver.release_segment(self.context, segment)
alloc = self.driver.get_allocation(self.context,
observed[api.SEGMENTATION_ID])
self.assertFalse(alloc.allocated)
segment[api.SEGMENTATION_ID] = 1000
observed = self.driver.reserve_provider_segment(self.context, segment)
alloc = self.driver.get_allocation(self.context,
observed[api.SEGMENTATION_ID])
self.assertTrue(alloc.allocated)
self.driver.release_segment(self.context, segment)
alloc = self.driver.get_allocation(self.context,
observed[api.SEGMENTATION_ID])
self.assertIsNone(alloc)
def test_reserve_provider_segment(self):
tunnel_ids = set()
specs = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: 'None',
api.SEGMENTATION_ID: None}
for x in moves.range(TUN_MIN, TUN_MAX + 1):
segment = self.driver.reserve_provider_segment(self.context,
specs)
self.assertEqual(self.TYPE, segment[api.NETWORK_TYPE])
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.GreaterThan(TUN_MIN - 1))
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.LessThan(TUN_MAX + 1))
tunnel_ids.add(segment[api.SEGMENTATION_ID])
with testtools.ExpectedException(exc.NoNetworkAvailable):
segment = self.driver.reserve_provider_segment(self.context,
specs)
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: 'None',
api.SEGMENTATION_ID: tunnel_ids.pop()}
self.driver.release_segment(self.context, segment)
segment = self.driver.reserve_provider_segment(self.context, specs)
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.GreaterThan(TUN_MIN - 1))
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.LessThan(TUN_MAX + 1))
tunnel_ids.add(segment[api.SEGMENTATION_ID])
for tunnel_id in tunnel_ids:
segment[api.SEGMENTATION_ID] = tunnel_id
self.driver.release_segment(self.context, segment)
def test_allocate_tenant_segment(self):
tunnel_ids = set()
for x in moves.range(TUN_MIN, TUN_MAX + 1):
segment = self.driver.allocate_tenant_segment(self.context)
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.GreaterThan(TUN_MIN - 1))
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.LessThan(TUN_MAX + 1))
tunnel_ids.add(segment[api.SEGMENTATION_ID])
segment = self.driver.allocate_tenant_segment(self.context)
self.assertIsNone(segment)
segment = {api.NETWORK_TYPE: self.TYPE,
api.PHYSICAL_NETWORK: 'None',
api.SEGMENTATION_ID: tunnel_ids.pop()}
self.driver.release_segment(self.context, segment)
segment = self.driver.allocate_tenant_segment(self.context)
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.GreaterThan(TUN_MIN - 1))
self.assertThat(segment[api.SEGMENTATION_ID],
matchers.LessThan(TUN_MAX + 1))
tunnel_ids.add(segment[api.SEGMENTATION_ID])
for tunnel_id in tunnel_ids:
segment[api.SEGMENTATION_ID] = tunnel_id
self.driver.release_segment(self.context, segment)
def add_endpoint(self, ip=TUNNEL_IP_ONE, host=HOST_ONE):
return self.driver.add_endpoint(ip, host)
def test_add_endpoint(self):
endpoint = self.add_endpoint()
self.assertEqual(TUNNEL_IP_ONE, endpoint.ip_address)
self.assertEqual(HOST_ONE, endpoint.host)
return endpoint
def test_add_endpoint_for_existing_tunnel_ip(self):
self.add_endpoint()
with mock.patch.object(type_tunnel.LOG, 'warning') as log_warn:
self.add_endpoint()
log_warn.assert_called_once_with(mock.ANY, TUNNEL_IP_ONE)
def test_get_endpoint_by_host(self):
self.add_endpoint()
host_endpoint = self.driver.get_endpoint_by_host(HOST_ONE)
self.assertEqual(TUNNEL_IP_ONE, host_endpoint.ip_address)
return host_endpoint
def test_get_endpoint_by_host_for_not_existing_host(self):
ip_endpoint = self.driver.get_endpoint_by_host(HOST_TWO)
self.assertIsNone(ip_endpoint)
def test_get_endpoint_by_ip(self):
self.add_endpoint()
ip_endpoint = self.driver.get_endpoint_by_ip(TUNNEL_IP_ONE)
self.assertEqual(HOST_ONE, ip_endpoint.host)
return ip_endpoint
def test_get_endpoint_by_ip_for_not_existing_tunnel_ip(self):
ip_endpoint = self.driver.get_endpoint_by_ip(TUNNEL_IP_TWO)
self.assertIsNone(ip_endpoint)
def test_delete_endpoint(self):
self.add_endpoint()
self.assertIsNone(self.driver.delete_endpoint(TUNNEL_IP_ONE))
# Get all the endpoints and verify its empty
endpoints = self.driver.get_endpoints()
self.assertNotIn(TUNNEL_IP_ONE, endpoints)
class TunnelTypeMultiRangeTestMixin(object):
DRIVER_CLASS = None
TUN_MIN0 = 100
TUN_MAX0 = 101
TUN_MIN1 = 200
TUN_MAX1 = 201
TUNNEL_MULTI_RANGES = [(TUN_MIN0, TUN_MAX0), (TUN_MIN1, TUN_MAX1)]
def setUp(self):
super(TunnelTypeMultiRangeTestMixin, self).setUp()
self.driver = self.DRIVER_CLASS()
self.driver.tunnel_ranges = self.TUNNEL_MULTI_RANGES
self.driver.sync_allocations()
self.context = context.Context()
def test_release_segment(self):
segments = [self.driver.allocate_tenant_segment(self.context)
for i in range(4)]
# Release them in random order. No special meaning.
for i in (0, 2, 1, 3):
self.driver.release_segment(self.context, segments[i])
for key in (self.TUN_MIN0, self.TUN_MAX0,
self.TUN_MIN1, self.TUN_MAX1):
alloc = self.driver.get_allocation(self.context, key)
self.assertFalse(alloc.allocated)
class TunnelRpcCallbackTestMixin(object):
DRIVER_CLASS = None
TYPE = None
def setUp(self):
super(TunnelRpcCallbackTestMixin, self).setUp()
self.driver = self.DRIVER_CLASS()
def _test_tunnel_sync(self, kwargs, delete_tunnel=False):
with mock.patch.object(self.notifier,
'tunnel_update') as tunnel_update,\
mock.patch.object(self.notifier,
'tunnel_delete') as tunnel_delete:
details = self.callbacks.tunnel_sync('fake_context', **kwargs)
tunnels = details['tunnels']
for tunnel in tunnels:
self.assertEqual(kwargs['tunnel_ip'], tunnel['ip_address'])
self.assertEqual(kwargs['host'], tunnel['host'])
self.assertTrue(tunnel_update.called)
if delete_tunnel:
self.assertTrue(tunnel_delete.called)
else:
self.assertFalse(tunnel_delete.called)
def _test_tunnel_sync_raises(self, kwargs):
with mock.patch.object(self.notifier,
'tunnel_update') as tunnel_update,\
mock.patch.object(self.notifier,
'tunnel_delete') as tunnel_delete:
self.assertRaises(exc.InvalidInput,
self.callbacks.tunnel_sync,
'fake_context', **kwargs)
self.assertFalse(tunnel_update.called)
self.assertFalse(tunnel_delete.called)
def test_tunnel_sync_called_without_host_passed(self):
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': None}
self._test_tunnel_sync(kwargs)
def test_tunnel_sync_called_with_host_passed_for_existing_tunnel_ip(self):
self.driver.add_endpoint(TUNNEL_IP_ONE, None)
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync(kwargs)
def test_tunnel_sync_called_with_host_passed(self):
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync(kwargs)
def test_tunnel_sync_called_with_host_passed_ipv6(self):
cfg.CONF.set_override('overlay_ip_version', p_const.IP_VERSION_6,
group='ml2')
kwargs = {'tunnel_ip': TUNNEL_IPV6_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync(kwargs)
def test_tunnel_sync_called_for_existing_endpoint(self):
self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE)
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync(kwargs)
def test_tunnel_sync_called_for_existing_host_with_tunnel_ip_changed(self):
self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE)
kwargs = {'tunnel_ip': TUNNEL_IP_TWO, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync(kwargs, True)
def test_tunnel_sync_called_with_used_tunnel_ip_host_roaming(self):
self.driver.add_endpoint(TUNNEL_IP_ONE, HOST_ONE)
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_TWO}
self._test_tunnel_sync(kwargs, False)
def test_tunnel_sync_called_with_used_tunnel_ip_roaming_case_two(self):
self.driver.add_endpoint(TUNNEL_IP_ONE, None)
self.driver.add_endpoint(TUNNEL_IP_TWO, HOST_TWO)
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_TWO}
self._test_tunnel_sync(kwargs, False)
def test_tunnel_sync_called_without_tunnel_ip(self):
kwargs = {'tunnel_type': self.TYPE, 'host': None}
self._test_tunnel_sync_raises(kwargs)
def test_tunnel_sync_called_without_tunnel_type(self):
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'host': None}
self._test_tunnel_sync_raises(kwargs)
def test_tunnel_sync_called_with_tunnel_overlay_mismatch(self):
cfg.CONF.set_override('overlay_ip_version', p_const.IP_VERSION_6,
group='ml2')
kwargs = {'tunnel_ip': TUNNEL_IP_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync_raises(kwargs)
def test_tunnel_sync_called_with_tunnel_overlay_mismatch_ipv6(self):
cfg.CONF.set_override('overlay_ip_version', p_const.IP_VERSION_4,
group='ml2')
kwargs = {'tunnel_ip': TUNNEL_IPV6_ONE, 'tunnel_type': self.TYPE,
'host': HOST_ONE}
self._test_tunnel_sync_raises(kwargs)
class TunnelTypeMTUTestMixin(object):
DRIVER_CLASS = None
TYPE = None
ENCAP_OVERHEAD = 0
def setUp(self):
super(TunnelTypeMTUTestMixin, self).setUp()
self.driver = self.DRIVER_CLASS()
def _test_get_mtu(self, ip_version):
cfg.CONF.set_override('overlay_ip_version', ip_version,
group='ml2')
ip_header_length = p_const.IP_HEADER_LENGTH[ip_version]
cfg.CONF.set_override('global_physnet_mtu', 1500)
cfg.CONF.set_override('path_mtu', 1475, group='ml2')
self.driver.physnet_mtus = {'physnet1': 1450, 'physnet2': 1400}
self.assertEqual(1475 - self.ENCAP_OVERHEAD - ip_header_length,
self.driver.get_mtu('physnet1'))
cfg.CONF.set_override('global_physnet_mtu', 1450)
cfg.CONF.set_override('path_mtu', 1475, group='ml2')
self.driver.physnet_mtus = {'physnet1': 1400, 'physnet2': 1425}
self.assertEqual(1450 - self.ENCAP_OVERHEAD - ip_header_length,
self.driver.get_mtu('physnet1'))
cfg.CONF.set_override('global_physnet_mtu', 0)
cfg.CONF.set_override('path_mtu', 1450, group='ml2')
self.driver.physnet_mtus = {'physnet1': 1425, 'physnet2': 1400}
self.assertEqual(1450 - self.ENCAP_OVERHEAD - ip_header_length,
self.driver.get_mtu('physnet1'))
cfg.CONF.set_override('global_physnet_mtu', 0)
cfg.CONF.set_override('path_mtu', 0, group='ml2')
self.driver.physnet_mtus = {}
self.assertEqual(0, self.driver.get_mtu('physnet1'))
def test_get_mtu_ipv4(self):
self._test_get_mtu(4)
def test_get_mtu_ipv6(self):
self._test_get_mtu(6)
| noironetworks/neutron | neutron/tests/unit/plugins/ml2/drivers/base_type_tunnel.py | Python | apache-2.0 | 18,769 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# metadata
from __future__ import unicode_literals
import frappe, os
from frappe.model.meta import Meta
from frappe.modules import scrub, get_module_path, load_doctype_module
from frappe.utils import get_html_format
from frappe.translate import make_dict_from_messages, extract_messages_from_code
from frappe.model.utils import render_include
from frappe.build import scrub_html_template
import io
from six import iteritems
def get_meta(doctype, cached=True):
if cached and not frappe.conf.developer_mode:
meta = frappe.cache().hget("form_meta", doctype, lambda: FormMeta(doctype))
else:
meta = FormMeta(doctype)
if frappe.local.lang != 'en':
meta.set_translations(frappe.local.lang)
return meta
class FormMeta(Meta):
def __init__(self, doctype):
super(FormMeta, self).__init__(doctype)
self.load_assets()
def load_assets(self):
self.add_search_fields()
self.add_linked_document_type()
if not self.istable:
self.add_code()
self.add_custom_script()
self.load_print_formats()
self.load_workflows()
self.load_templates()
self.load_dashboard()
self.load_kanban_meta()
def as_dict(self, no_nulls=False):
d = super(FormMeta, self).as_dict(no_nulls=no_nulls)
for k in ("__js", "__css", "__list_js", "__calendar_js", "__map_js",
"__linked_with", "__messages", "__print_formats", "__workflow_docs",
"__form_grid_templates", "__listview_template", "__tree_js",
"__dashboard", "__kanban_boards", "__kanban_column_fields", '__templates',
'__custom_js'):
d[k] = self.get(k)
for i, df in enumerate(d.get("fields")):
for k in ("search_fields", "is_custom_field", "linked_document_type"):
df[k] = self.get("fields")[i].get(k)
return d
def add_code(self):
if self.custom:
return
path = os.path.join(get_module_path(self.module), 'doctype', scrub(self.name))
def _get_path(fname):
return os.path.join(path, scrub(fname))
system_country = frappe.get_system_settings("country")
self._add_code(_get_path(self.name + '.js'), '__js')
if system_country:
self._add_code(_get_path(os.path.join('regional', system_country + '.js')), '__js')
self._add_code(_get_path(self.name + '.css'), "__css")
self._add_code(_get_path(self.name + '_list.js'), '__list_js')
self._add_code(_get_path(self.name + '_calendar.js'), '__calendar_js')
self._add_code(_get_path(self.name + '_tree.js'), '__tree_js')
listview_template = _get_path(self.name + '_list.html')
if os.path.exists(listview_template):
self.set("__listview_template", get_html_format(listview_template))
self.add_code_via_hook("doctype_js", "__js")
self.add_code_via_hook("doctype_list_js", "__list_js")
self.add_code_via_hook("doctype_tree_js", "__tree_js")
self.add_code_via_hook("doctype_calendar_js", "__calendar_js")
self.add_html_templates(path)
def _add_code(self, path, fieldname):
js = get_js(path)
if js:
self.set(fieldname, (self.get(fieldname) or "")
+ "\n\n/* Adding {0} */\n\n".format(path) + js)
def add_html_templates(self, path):
if self.custom:
return
templates = dict()
for fname in os.listdir(path):
if fname.endswith(".html"):
with io.open(os.path.join(path, fname), 'r', encoding = 'utf-8') as f:
templates[fname.split('.')[0]] = scrub_html_template(f.read())
self.set("__templates", templates or None)
def add_code_via_hook(self, hook, fieldname):
for path in get_code_files_via_hooks(hook, self.name):
self._add_code(path, fieldname)
    def add_custom_script(self):
        """embed all required files"""
# custom script
custom = frappe.db.get_value("Custom Script", {"dt": self.name,
"script_type": "Client"}, "script") or ""
self.set("__custom_js", custom)
def add_search_fields(self):
"""add search fields found in the doctypes indicated by link fields' options"""
for df in self.get("fields", {"fieldtype": "Link", "options":["!=", "[Select]"]}):
if df.options:
search_fields = frappe.get_meta(df.options).search_fields
if search_fields:
search_fields = search_fields.split(",")
df.search_fields = [sf.strip() for sf in search_fields]
def add_linked_document_type(self):
for df in self.get("fields", {"fieldtype": "Link"}):
if df.options:
try:
df.linked_document_type = frappe.get_meta(df.options).document_type
except frappe.DoesNotExistError:
# edge case where options="[Select]"
pass
def load_print_formats(self):
print_formats = frappe.db.sql("""select * FROM `tabPrint Format`
WHERE doc_type=%s AND docstatus<2 and disabled=0""", (self.name,), as_dict=1,
update={"doctype":"Print Format"})
self.set("__print_formats", print_formats, as_value=True)
def load_workflows(self):
# get active workflow
workflow_name = self.get_workflow()
workflow_docs = []
if workflow_name and frappe.db.exists("Workflow", workflow_name):
workflow = frappe.get_doc("Workflow", workflow_name)
workflow_docs.append(workflow)
for d in workflow.get("states"):
workflow_docs.append(frappe.get_doc("Workflow State", d.state))
self.set("__workflow_docs", workflow_docs, as_value=True)
def load_templates(self):
if not self.custom:
module = load_doctype_module(self.name)
app = module.__name__.split(".")[0]
templates = {}
if hasattr(module, "form_grid_templates"):
for key, path in iteritems(module.form_grid_templates):
templates[key] = get_html_format(frappe.get_app_path(app, path))
self.set("__form_grid_templates", templates)
def set_translations(self, lang):
self.set("__messages", frappe.get_lang_dict("doctype", self.name))
# set translations for grid templates
if self.get("__form_grid_templates"):
for content in self.get("__form_grid_templates").values():
messages = extract_messages_from_code(content)
messages = make_dict_from_messages(messages)
self.get("__messages").update(messages, as_value=True)
def load_dashboard(self):
if self.custom:
return
self.set('__dashboard', self.get_dashboard_data())
def load_kanban_meta(self):
self.load_kanban_boards()
self.load_kanban_column_fields()
def load_kanban_boards(self):
kanban_boards = frappe.get_list(
'Kanban Board', fields=['name', 'filters', 'reference_doctype', 'private'], filters={'reference_doctype': self.name})
self.set("__kanban_boards", kanban_boards, as_value=True)
def load_kanban_column_fields(self):
values = frappe.get_list(
'Kanban Board', fields=['field_name'],
filters={'reference_doctype': self.name})
fields = [x['field_name'] for x in values]
fields = list(set(fields))
self.set("__kanban_column_fields", fields, as_value=True)
def get_code_files_via_hooks(hook, name):
code_files = []
for app_name in frappe.get_installed_apps():
code_hook = frappe.get_hooks(hook, default={}, app_name=app_name)
if not code_hook:
continue
files = code_hook.get(name, [])
if not isinstance(files, list):
files = [files]
for file in files:
path = frappe.get_app_path(app_name, *file.strip("/").split("/"))
code_files.append(path)
return code_files
def get_js(path):
js = frappe.read_file(path)
if js:
return render_include(js)
| chdecultot/frappe | frappe/desk/form/meta.py | Python | mit | 7,240 |
def clamped(x, min=0, max=100):
"""Return x clamped between min and max."""
if x > max: return max
elif x < min: return min
else: return x
def confirm_chance(x, min=0, max=100):
"""
    Return True if x is less than or equal to a random integer drawn
    uniformly from min+1 to max (inclusive); always return False when x == min.
Useful for fast probability "is hit" checks.
"""
if x == min: return False
import random
return x <= random.randint(min+1, max)
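# Hedged usage note (added; not part of the original module): with the default
# bounds the call succeeds with probability (max - x + 1) / (max - min) for
# integer x in (min, max].  A quick empirical check:
#
#   hits = sum(confirm_chance(75) for _ in range(10000))
#   # hits / 10000.0 should come out close to 26/100 = 0.26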
| thespeks/pyTRPG | trpg/utils/math.py | Python | gpl-3.0 | 474 |
import os
import sys
import networkx as nx
# alg: http://www.cs.bgu.ac.il/~dekelts/publications/subtree.pdf
# input: G and H
# output: YES if G contains a subtree isomorphic to H, NO otherwise
# 1. select a vertex r of G to be the root of G
# 2. For all u \in H, v \in G, S(v, u) \gets \null
# 3. For all leaves v of G^r do
# 4. For all leaves u of H do S(v, u) \gets N(u)
# 5. For all internal vertices v of G^r in postorder do
# 6. Let v1,\dots,vt be the children of v
# 7. For all vertices u = u_0 of H with degree at most (t + 1) do
# 8. Let u1,\dots,us be the neighbors of u
# 9. Construct a bipartite graph B(v, u) = (X, Y, E_{vu}), where X = {u1,\dots,us} Y = {v1,\dots,vt}, and E_{vu} = {u_i v_j : u \in S(v_j, u_i) }
# 10. For all 0 \leq i \leq s do
# 11. Compute the size m_i of a maximum matching between X_i and Y
# 12. S(v, u) \gets {u_i : m_i = |X_i|, 0 \leq i \leq s}
# 13. If u \in S(v, u) then Return YES
# 14. end For
# 15. end For
# 16. Return NO
def pause(locals):
while True:
variable = raw_input("> ")
if variable in locals:
print locals[variable]
elif len(variable) == 0:
return
def find_leaves(G, r):
queue = [(r, None)]
leaves = []
visited = []
children = {}
while len(queue) > 0:
(curr, prev) = queue.pop(0)
if curr not in children:
children[curr] = []
neighbors = G.neighbors(curr)
if len(neighbors) == 1 and curr != r:
leaves.append(curr)
else:
if prev != None and prev in neighbors:
neighbors.remove(prev)
for n in neighbors:
queue.append((n, curr))
children[curr].append(n)
return leaves, children
def postorder(G, parent, curr, nodes):
neighbors = G.neighbors(curr)
neighbors = filter(lambda n : n not in nodes, neighbors)
if parent in neighbors:
neighbors.remove(parent)
for n in neighbors:
postorder(G, curr, n, nodes)
nodes.append(curr)
def find_internals(G, r, leaves):
nodes = []
postorder(G, -1, r, nodes)
return filter(lambda x : x not in leaves, nodes)
def find_nodes_of_at_most_degree(G, t):
matches = []
for n in G.nodes():
if len(G.neighbors(n)) <= t:
matches.append(n)
return matches
def subtree_isomorphism(G, H):
# Run the algorithm
r = G.nodes()[0]
# Enumerate all leaves in G by BFS
leaves, children = find_leaves(G, r)
print "Leaves:", leaves
print "Children:", children
# Initialize the S map
S = {}
for u in H.nodes():
for v in G.nodes():
S[(v,u)] = set()
# Initialize S[] based on the leaves of G to start
for gl in leaves:
for u in H.nodes():
hleaves, dummyChildren = find_leaves(H, u)
for hl in hleaves:
S[(gl, hl)] = H.neighbors(u)
### CORRECT TO HERE
# Main loop
internals = find_internals(G, r, leaves)
for i,v in enumerate(internals):
childs = children[v]
t = len(childs)
hdegrees = find_nodes_of_at_most_degree(H, t + 1)
for j,u in enumerate(hdegrees):
uneighbors = H.neighbors(u) # u1,...,us
s = len(uneighbors)
X = uneighbors
Y = childs
edgeSet = []
for uu in X:
for vv in Y:
if (vv,uu) in S:
if u in S[(vv, uu)]:
edgeSet.append((uu, vv))
# Construct the bipartite graph between the two vertex sets
bg = nx.Graph()
bg.add_nodes_from(X, bipartite=0)
bg.add_nodes_from(Y, bipartite=1)
bg.add_edges_from(edgeSet)
#pause(locals())
# Try to find all the maximal matchings for all i = 0..s
mi_vector = []
m_star = 0
X_star = []
for si in range(-1, s):
# Define X_0 = X and X_i = X \ {u_i}
X_i = X # only if i = 0 (si == -1)
u_i = u # fixed.
if si >= 0:
u_i = X[si] # X = uneighbors
X_i = [uu for uu in X if uu != u_i]
testGraph = nx.Graph()
testGraph.add_nodes_from(X_i, bipartite=0)
testGraph.add_nodes_from(Y, bipartite=1)
edgeSet = []
for uu in X_i:
neighbors = bg.neighbors(uu)
for n in neighbors:
edgeSet.append((uu, n))
testGraph.add_edges_from(edgeSet)
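                # Note (added): nx.maximal_matching returns a greedy maximal
                # matching, which can be smaller than the maximum matching that
                # step 11 of the algorithm sketched at the top of this file
                # calls for; a maximum bipartite matching routine would be
                # needed for strict correctness.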
m_i = len(nx.maximal_matching(testGraph))
mi_vector.append((m_i, u_i, X_i)) # record the X_i, this can be skipped
#pause(locals())
if m_i > m_star:
m_star = m_i
X_star = X_i
if (v,u) not in S:
S[(v,u)] = set()
for (m_i, u_i, X_i) in mi_vector:
if m_i == len(X_i): #m_star:
S[(v, u)].add(u_i)
if u in S[(v, u)]:
print v, u
return "YES"
return "NO"
# Create G and H
G = nx.Graph()
H = nx.Graph()
G.add_nodes_from([1,2,3,4,5,6,7])
G.add_edge(1,2)
G.add_edge(1,3)
G.add_edge(2,4)
G.add_edge(2,5)
G.add_edge(3,6)
G.add_edge(3,7)
H.add_nodes_from([8,9,10])
H.add_edge(8,9)
H.add_edge(8,10)
print nx.is_tree(G)
print nx.is_tree(H)
print subtree_isomorphism(G, H)
G = nx.Graph()
G.add_nodes_from([1,2,3,4,5])
G.add_edge(1,2)
G.add_edge(2,3)
G.add_edge(3,4)
G.add_edge(4,5)
print nx.is_tree(G)
print nx.is_tree(H)
print subtree_isomorphism(G, H)
| chris-wood/extractor-refactor | src/isomorphic_subtree.py | Python | gpl-2.0 | 5,755 |
#!/usr/bin/python
##############################################################
# Program name: NCAA Basketball Stats Scraper (Schedule Mapping Module)
# Version: 1.0
# By: Rodrigo Zamith
# License: MPL 2.0 (see LICENSE file in root folder)
# Additional thanks:
##############################################################
# Import modules and libraries
import scraperfunctions
import scrapersettings
import csv
from bs4 import BeautifulSoup
if (scrapersettings.map_schedule == 1):
print "Generating schedule mappings"
# Create the file headings
schedule_mappingfile_w = open(scrapersettings.schedule_mappingfile, "w")
schedule_mappingfile_w.writelines("game_id\thome_team_id\taway_team_id\tdate\tneutral_site\tgame_link\n")
# Grab data
# Parse our mappings file to get our list of teams
team_mapping = scraperfunctions.get_team_mappings()
# Create the schedule
schedule_list = [] # Create an empty list for storing all of our games
for value, team in enumerate(team_mapping): # For each team in our dictionary
if scrapersettings.debugmode == 1: print "Processing team " + str(team) + " (" + str(value+1) + " of " + str(len(team_mapping)) + ")"
try:
team_mainpage_data = scraperfunctions.grabber(team_mapping[team][1], scrapersettings.params, scrapersettings.http_header) # Grab the main page for each team
except:
            print "Error getting data. Moving on to next team."
continue
team_mainpage_data_soup = BeautifulSoup(team_mainpage_data) # Soupify that page
gamelinks = [] # Create a blank list for each game
for link in team_mainpage_data_soup.find_all('a'): # Locate all links in the document
if "game/index/" in link.get('href'): # If they contain a URL segment suggesting it is a game...
game_link = str(scrapersettings.domain_base + link.get('href')).split("?")[0] # Strip out any URL variables since we don't need them
try:
opponent_id = link.find_previous("td").find_previous("td").find("a").get('href').split("?org_id=")[1]
except:
opponent_id = 0
opponent_text = link.find_previous("td").find_previous("td").get_text().encode('utf-8').strip()
if "@" in opponent_text: # Checks if home or away; note: if it's in a neutral site, this distinction may not be accurate (but a neutral site is flagged). Assumes all games against non-D-I/III competition is at home.
home_team = opponent_id
away_team = team
if "<br/>" in str(link.find_previous("a").encode('utf-8').strip()):
neutral = "1"
else:
neutral = "0"
else:
home_team = team
away_team = opponent_id
neutral = "0"
date = link.find_previous("td").find_previous("td").find_previous("td").get_text() # Get the date for the game
game_id = game_link.split("/")[-1] # Get the game ID from the URL (last set of digits)
schedule_list.append([game_id, home_team, away_team, date, neutral, game_link]) # Append all of this information to our master schedule list
schedule_dict = dict([(case[0], (case[1:])) for case in schedule_list]) # Create a dictionary from our list so we don't have any duplicate entries
for item in schedule_dict: # For each item on that list
schedule_mappingfile_w.writelines(item + "\t" + str(schedule_dict[item][0]) + "\t" + str(schedule_dict[item][1]) + "\t" + str(schedule_dict[item][2]) + "\t" + str(schedule_dict[item][3]) + "\t" + str(schedule_dict[item][4]) + "\n") # Write to our mapping file
print "Successfully generated schedule mappings" | rodzam/ncaab-stats-scraper | create_schedule_mappings.py | Python | mpl-2.0 | 3,857 |
import sys
def F(n): return 1 if n == 0 else n - M(F(n-1))
def M(n): return 0 if n == 0 else n - F(M(n-1))
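# Worked values (added for illustration): F and M are the mutually recursive
# Hofstadter Female/Male sequences; the first terms are
#   F(0..9) = 1, 1, 2, 2, 3, 3, 4, 5, 5, 6
#   M(0..9) = 0, 0, 1, 2, 2, 3, 4, 4, 5, 6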
print(F(int(sys.argv[1])));
print(M(int(sys.argv[1])));
| trizen/language-benchmarks | Interpreted/mutual-recursion/mrec.py | Python | gpl-3.0 | 165 |
"""User EndPoint for AsciiPic."""
from cherrypy import tools
from oslo_log import log as logging
import validate_email
from asciipic.api import base as base_api
from asciipic.db.managers import user
USERS = user.Users
LOG = logging.getLogger(__name__)
class AccountEndpoint(base_api.BaseAPI):
"""User EndPoint for AsciiPic."""
# Whether this application should be available for clients
exposed = True
# pylint: disable=no-self-use
@tools.json_out()
def POST(self, **kwargs):
"""Create a new account."""
response = {
"meta": {
"status": True,
"verbose": "Ok"
},
"content": None
}
username = kwargs.pop("username", None)
password = kwargs.pop("password", None)
email = kwargs.pop("email", None)
# Password check
if len(password) not in range(5, 31):
response["meta"]["status"] = False
response["meta"]["verbose"] = ("Password not the"
" proper length [5, 30].")
# Email checks
if not USERS.check_email(email):
response["meta"]["status"] = False
response["meta"]["verbose"] = "Email already used."
if not validate_email.validate_email(email):
response["meta"]["status"] = False
            response["meta"]["verbose"] = "Email is not valid"
# Username checks
if not USERS.check_username(username):
response["meta"]["status"] = False
response["meta"]["verbose"] = "Username already used."
if len(username) not in range(3, 21):
response["meta"]["status"] = False
response["meta"]["verbose"] = ("Username not the"
" proper length [3, 20].")
if response["meta"]["status"]:
response["meta"]["verbose"] = "User {} created".format(
username)
USERS.create_user(username, password, email)
return response
| micumatei/asciipic | asciipic/api/api_endpoint/user/account.py | Python | mit | 2,066 |
import glob
import os
import re
import yaml
from argparse import ArgumentParser
from .runner import Runner
from .cli import CLI
class GhcRunner(Runner):
IMPORT_REGEX = re.compile(r'^\s*import\s+(.*?)$')
def reset(self):
self.required = []
self.incdirs = []
def make_code(self, file, filepath, filename):
files = dict()
code = ''
for line in file:
m = self.IMPORT_REGEX.match(line)
if m:
files.update(self.on_import(m.group(1)))
code += line
files[filename] = code
return files
def set_search_path(self, paths):
self.search_path = paths
def add_search_path(self, path):
self.search_path.append(path)
def on_import(self, path):
files = dict()
# TODO
return files
def build_compiler_options(self, options):
super(GhcRunner, self).build_compiler_options(options)
self.add_commandline_options('-dynamic')
class GhcCLI(CLI):
def __init__(self, compiler=None):
super(GhcCLI, self).__init__('Haskell', compiler)
def get_runner(self, args, options):
return GhcRunner(args.language, args.compiler, args.save, args.encoding, args.retry, args.retry_wait)
class HaskellStackCLI:
class InnerCLI(GhcCLI):
def __init__(self, compiler=None):
self.libdirs = []
super(HaskellStackCLI.InnerCLI, self).__init__(compiler)
def get_runner(self, args, options):
runner = super(HaskellStackCLI.InnerCLI, self).get_runner(args, options)
runner.set_search_path(self.libdirs)
return runner
def __init__(self, compiler=None):
self.setup(compiler)
# command line option
def setup(self, compiler):
self.parser = ArgumentParser(add_help=False)
self.parser.add_argument(
'-c',
'--compiler',
default=compiler
)
self.parser.add_argument(
'-n',
'--dryrun',
action='store_true',
help='dryrun'
)
subparser = self.parser.add_subparsers()
run_cmd = subparser.add_parser(
'run',
prefix_chars='+',
description='build and run command',
help='build and run command. see `run +h`'
)
build_cmd = subparser.add_parser(
'build',
prefix_chars='+',
description='build and run command (run command alias)',
help='build and run command (run command alias). see `build +h`'
)
passthrough_cmds = [run_cmd, build_cmd]
for passthrough_cmd in passthrough_cmds:
passthrough_cmd.set_defaults(handler=self.command_run)
passthrough_cmd.add_argument(
'options',
metavar='OPTIONS',
nargs='*',
help='options'
)
def parse_command_line(self, argv):
opts, args = self.parser.parse_known_args(argv)
if 'WANDBOX_DRYRUN' in os.environ:
opts.dryrun = True
return opts, args
def print_help(self):
self.parser.print_help()
def execute(self):
self.execute_with_args()
def execute_with_args(self, args=None):
opts, args = self.parse_command_line(args)
if hasattr(opts, 'handler'):
opts.handler(opts, args)
else:
self.print_help()
def command_run(self, opts, args):
cmd = HaskellStackCLI.InnerCLI(opts.compiler)
run_options = ['run']
cli_options = args
if opts.dryrun:
cli_options.append('--dryrun')
with open('package.yaml', 'r') as yml:
config = yaml.safe_load(yml)
exec_config = config['executables']['haskell-stack-exe']
main = exec_config['main']
main_dir = exec_config['source-dirs']
run_options.append(os.path.join(main_dir, main))
options = exec_config['ghc-options']
run_options.extend(options)
dirs = config['library']['source-dirs']
if isinstance(dirs, str):
dirs = [dirs]
for dir in dirs:
cmd.libdirs.append(dir)
for x in glob.glob(os.path.join(dir, '*.hs')):
run_options.append(x)
cmd.execute_with_args(cli_options + run_options)
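    # Hedged illustration (added; not part of the original source): command_run()
    # above assumes an hpack-style package.yaml roughly shaped like the sketch
    # below. The paths and option values are invented; only the keys read by the
    # code matter.
    #
    #   executables:
    #     haskell-stack-exe:
    #       main: Main.hs
    #       source-dirs: app
    #       ghc-options:
    #       - -threaded
    #   library:
    #     source-dirs: src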
def ghc(compiler=None):
cli = GhcCLI(compiler)
cli.execute()
def haskell_stack(compiler=None):
cli = HaskellStackCLI(compiler)
cli.execute()
def main():
ghc()
if __name__ == '__main__':
main()
| srz-zumix/wandbox-api | wandbox/__ghc__.py | Python | mit | 4,680 |
"""
A hapax legomenon (often abbreviated to hapax)
is a word which occurs only once in either the
written record of a language, the works of an author,
or in a single text. Define a function that given
the file name of a text will return all its hapaxes.
Make sure your program ignores capitalization.
"""
def text_converter(raw_text): return [''.join(ch for ch in word if ch.isalnum()).lower() for word in raw_text.split()]
def hapax(text): return [word for word in text if text.count(word) == 1]
def hapax_file(file_name):
with open(file_name, 'r') as file_handle:
return hapax(text_converter(file_handle.read()))
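# A sketch of an equivalent, faster approach (added; not part of the original
# exercise): collections.Counter tallies every word once, avoiding the repeated
# list.count() scans done by hapax() above.
#
#   from collections import Counter
#   def hapax_counter(text):
#       return [word for word, n in Counter(text).items() if n == 1]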
test_text = '''A hapax legomenon (often abbreviated to hapax)
is a word which occurs only once in either the
written record of a language, the works of an author,
or in a single text. Define a function that given
the file name of a text will return all its hapaxes.
Make sure your program ignores capitalization.'''
test_results = hapax(text_converter(test_text))
assert 'legomenon' in test_results
assert 'word' in test_results
assert not 'a' in test_results
| krzyszti/my_projects | Python/Exercises/36.py | Python | mit | 1,098 |
import os
from re import match, sub
from datetime import datetime
from pkg_resources import resource_listdir
from cmsis_svd.parser import SVDParser
from functools import reduce
def find_for_mcu(mcu):
mcu = mcu.lower()
vendors = resource_listdir("cmsis_svd", "data")
for vendor in vendors:
fnames = resource_listdir("cmsis_svd", "data/%s" % vendor)
for fname in fnames:
filename = fname.lower()
if not filename.endswith(".svd"):
continue
filename = filename[:-4]
if mcu.startswith(filename):
return vendor, fname
for fname in fnames:
filename = fname.lower()
if not filename.endswith(".svd"):
continue
filename = "^%s.*" % filename[:-4].replace('x', '.')
if match(filename, mcu):
return vendor, fname
return None, None
def load_svd_for_mcu(mcu):
vendor, filename = find_for_mcu(mcu)
if (vendor is None) or (filename is None):
return None
return SVDParser.for_packaged_svd(vendor, filename).get_device()
#parser = SVDParser.for_mcu(mcu)
#if parser is None:
# return None
#return parser.get_device()
GENERATED_FILE_HEADER = """/* This file generated at %s by mcu-info-util from SVD description. */
"""
def generate_header(mcu, filename=None):
svd = load_svd_for_mcu(mcu)
if not svd:
return False
if not filename:
return True
f = open(filename, "w")
f.write(GENERATED_FILE_HEADER % datetime.now().strftime('%x %X'))
include_guard = "__%s__" % (os.path.basename(filename).replace('.', '_').upper())
f.write("#ifndef %s\n" % (include_guard))
f.write("#define %s\n" % (include_guard))
f.write("\n")
f.write("#include <stdint.h>\n")
f.write("\n")
for periph in svd.peripherals:
f.write("#define %s_BASE %s\n" % (periph.name, hex(periph.base_address)))
f.write("\n")
for periph in svd.peripherals:
f.write("/* %s: %s */\n" % (periph.name, periph.description))
f.write("\n")
f.write("typedef struct %s_regs_t {\n" % periph.name)
regs = []
for reg in periph.registers:
regs.append([reg.address_offset, sub(r"[\[\]]", "", reg.name), reg.size])
regs.sort()
_regs = regs
regs = []
for reg in _regs:
if len(regs) and (regs[-1][0] == reg[0]):
assert regs[-1][2] == reg[2]
if isinstance(regs[-1][1], str):
regs[-1][1] = [regs[-1][1]]
regs[-1][1] += [reg[1]]
else:
regs.append(reg)
reseved_index = 0
offset = 0
for reg in regs:
skip_count = reg[0] - offset
assert skip_count >= 0, "%s_%s %s %s %s" % (periph.name, reg[1], offset, reg[0], regs)
if skip_count:
f.write("\tuint8_t RESERVED_%s[%s];\n" % (reseved_index, skip_count))
reseved_index += 1
offset += skip_count
item_size = (reg[2] + 7) // 8
if isinstance(reg[1], str):
f.write("\tuint%s_t %s;\n" % (item_size * 8, reg[1]))
else:
f.write("\tunion {\n")
for name in reg[1]:
f.write("\t\tuint%s_t %s;\n" % (item_size * 8, name))
f.write("\t};\n")
offset += item_size
f.write("} %s_regs_t;\n" % periph.name)
f.write("\n")
f.write("#define %s_regs ((volatile %s_regs_t*)%s_BASE)\n" % (periph.name, periph.name, periph.name))
f.write("\n")
for interrupt in periph.interrupts:
f.write("#define %s_IRQ %s\n" % (interrupt.name, interrupt.value))
f.write("\n")
for reg in periph.registers:
f.write("/* %s_%s: %s */\n" % (periph.name, sub(r"[\[\]]", "", reg.name), reg.description))
f.write("#define %s_%s (*((volatile uint%s_t*)(%s_BASE + %s)))\n" %
(periph.name, sub(r"[\[\]]", "", reg.name), reg.size, periph.name, reg.address_offset))
f.write("\n")
for field in reg.fields:
f.write("/* %s_%s_%s: %s */\n" % (periph.name, sub(r"[\[\]]", "", reg.name), field.name, field.description))
if field.bit_width == 1:
f.write("#define %s_%s_%s (1 << %s)\n" % (periph.name, sub(r"[\[\]]", "", reg.name),
field.name, field.bit_offset))
else:
f.write("#define %s_%s_%s_OFFSET %s\n" % (periph.name, sub(r"[\[\]]", "", reg.name),
field.name, field.bit_offset))
f.write("#define %s_%s_%s_WIDTH %s\n" % (periph.name, sub(r"[\[\]]", "", reg.name),
field.name, field.bit_width))
f.write("#define %s_%s_%s_MASK %s\n" %
(periph.name, sub(r"[\[\]]", "", reg.name), field.name,
hex(((2 ** field.bit_width) - 1) << field.bit_offset)))
f.write("\n")
f.write("\n")
interrupts = []
for periph in svd.peripherals:
for interrupt in periph.interrupts:
interrupts.append([interrupt.value, interrupt.name])
interrupts.sort()
_interrupts = interrupts
interrupts = []
for interrupt in _interrupts:
if len(interrupts) and (interrupts[-1][0] == interrupt[0]):
continue
else:
interrupts.append(interrupt)
f.write("#ifndef ISR\n")
f.write("#ifdef __cplusplus\n")
f.write("#define ISR(name) extern \"C\" void name(void)\n")
f.write("#else\n")
f.write("#define ISR(name) void name(void)\n")
f.write("#endif\n")
f.write("#endif\n")
f.write("\n")
f.write("ISR(empty_handler);\n")
for interrupt in interrupts:
f.write("ISR(%s_handler);\n" % interrupt[1])
f.write("\n")
f.write("#ifdef DEFINE_IRQ_HANDLERS\n")
f.write("\n")
f.write("ISR(empty_handler) {}\n")
f.write("\n")
for interrupt in interrupts:
f.write("#pragma weak %s_handler=empty_handler\n" % interrupt[1])
f.write("\n")
f.write("#define IRQ_HANDLERS \\\n")
index = 0
for interrupt in interrupts:
skip_count = interrupt[0] - index
assert skip_count >= 0
while skip_count > 0:
f.write("\t0,\\\n")
skip_count -= 1
index += 1
f.write("\t%s_handler,\\\n" % interrupt[1])
index += 1
f.write("\t0\n")
f.write("\n")
f.write("#endif\n")
f.write("\n")
f.write("#endif\n")
f.close()
return True
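# Hedged usage sketch (added for illustration; not part of the original
# module). It assumes the installed cmsis-svd package ships an SVD file whose
# name matches the given MCU string; the part number and output path are
# example values only.
#
#   if generate_header("stm32f103c8", "stm32f103_regs.h"):
#       print("header generated")
#   else:
#       print("no SVD description found for that MCU")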
| KivApple/mcu-info-util | mcu_info_util/svd_utils.py | Python | mit | 6,803 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Universidade de Aveiro, DETI/IEETA, Bioinformatics Group - http://bioinformatics.ua.pt/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from questionnaire.models import Questionnaire, Choice, Question, QuestionSet
import os
import re
import json
from openpyxl import Workbook, load_workbook
from openpyxl.styles import Style, PatternFill, Alignment, Font, Border, Side
from openpyxl.cell import Cell
from openpyxl.worksheet.datavalidation import DataValidation, ValidationType
from searchengine.search_indexes import convert_text_to_slug
from searchengine.models import Slugs
from questionnaire.utils import *
from fingerprint.models import Answer, AnswerChange
import datetime
from django.db import transaction
from Levenshtein import ratio
from qprocessors.choice import choice_list, serialize_list
"""This class is used to import the fingerprint template
"""
class ImportQuestionnaire(object):
def __init__(self, file_path):
self.file_path = file_path
def import_questionnaire(self, merge=None):
raise NotImplementedError("Please Implement this method")
def writeLog(self, log):
pass
#with open("log_%s.txt" % datetime.datetime.now().strftime("%Y%m%d-%H%M%S"), "w") as f:
# f.write(log)
# f.close()
def get_slug(self, slug, questionnaire):
return next_free_slug(slug, create=False, scope=questionnaire)
def format_number(self, number):
# print number
number_arr = number.split(".")
result = number_arr[0] + "."
for i in range(1,len(number_arr)):
val = int(number_arr[i])
if val<10:
val = "0" + str(val)
number_arr[i] = str(val)
if (i!=len(number_arr)-1):
result += str(val) + "."
else:
result += str(val)
# print "result " + result
        return result
    """This method will build the object according to the type
of the object to import.
"""
@staticmethod
def factory(t_type, file_path):
if t_type == "excel":
return ImportQuestionnaireExcel(file_path)
else:
raise Exception("The supplied format is not supported")
class CommentPlaceholder:
value='comment'
class ImportQuestionnaireExcel(ImportQuestionnaire):
QUESTION=0
CATEGORY=1
# choice match mode
EXACT_MATCH=0
SIMILARITY_MODE=1
def __init__(self, file_path):
ImportQuestionnaire.__init__(self, file_path)
    # This function implements the similarity check, based on a Levenshtein similarity ratio.
    # The idea is for this algorithm to be dynamically defined; for now it is static.
def __isSimilar(self, comparing_option, options, percentage):
closest = 0
match = None
for option in options:
this_ratio = ratio(comparing_option,option)
#print "'%s' and '%s' is %r similar" %(comparing_option, option, this_ratio)
if this_ratio > closest and this_ratio > percentage:
closest = this_ratio
match = option
return (closest, match)
def __handleAnswerChanges(self, question, change_map, debug=False):
if len(change_map) > 0:
#print "ANSWERS:"
def __answerChange(data, change_map):
response = choice_list(data).values()
for res in response:
try:
res['key'] = change_map[res['key']]
except KeyError:
pass
return serialize_list(response)
# Handle answer changes modifications
answers = Answer.objects.filter(question=question)
for ans in answers:
ans.data = __answerChange(ans.data, change_map)
if not debug:
ans.save()
# Update answer history
# For old values
ans_history = AnswerChange.objects.filter(answer=ans)
for hist in ans_history:
hist.old_value = __answerChange(hist.old_value, change_map)
hist.new_value = __answerChange(hist.new_value, change_map)
if not debug:
hist.save()
# Update dependencies too
'''print "UPDATE DEPENDENCIES FOR: %s" %(question.number)
for key, value in change_map:
print key
print value
print "--"
dependent_questions = Question.objects.filter(checks__contains='dependent="%s,%s"' % (question.number, key))
print "QUESTIONS DEPENDENT"
print dependent_questions
'''
#raise Exception("CRAZY")
def __processChoices(self, row, question, list_aux, log, mode=EXACT_MATCH, match_percentage=0.75, debug=False, infer_function=None):
#_choices_array_aux=[]
i = 0
# get current questions if any
old_choices = list(Choice.objects.filter(question=question).values_list('value', flat=True))
#print old_choices
change_map={} # maps the changes to be made over the question
indexes_map = {} # since ordering is absolute, on first passthrough i generate the sortid numbers
look_similar = [] # potential matches for 2nd pass similarity lookup
maybe_new = [] # for 3rd pass we may infer
# 1st pass: we do a first pass through to remove exact matches (is the same on both approaches)
for ch in list_aux:
i+=1
indexes_map[ch] = i
if ch in old_choices:
choice = Choice.objects.get(question=question, value=ch)
choice.sortid = i
if not debug:
choice.save()
#_choices_array_aux.append(ch)
old_choices.remove(ch)
else:
look_similar.append(ch)
def __similarMap(question, similar, ch):
change_map[similar] = ch
choice=Choice.objects.get(question=question, value=similar)
choice.text_en = ch
choice.value = ch
choice.sortid=indexes_map[ch]
if not debug:
choice.save()
#_choices_array_aux.append(ch)
old_choices.remove(similar)
# 2nd pass: lets analyse the rest that are not exact matches
for ch in look_similar:
if mode==self.SIMILARITY_MODE:
(closest, similar) = self.__isSimilar(ch, old_choices, match_percentage)
# if considered similar
if similar != None:
if closest < 1:
print "Replacing '%r' which is %r similar to '%r' on question %r" % (similar, closest, ch, question.number)
__similarMap(question, similar, ch)
else:
# maybe be new, to check on 3rd pass
maybe_new.append(ch)
# if this is exact match mode, we skip this step
else:
maybe_new.append(ch)
# 3rd pass: if there's an boolean lambda infer function to non obvious cases dealing, run it
run = list(maybe_new)
if infer_function != None and len(maybe_new) > 0 and len(old_choices) > 0:
for new in run:
print "RUN for " + str(new)
                if len(old_choices) > 0:
for old in old_choices:
if infer_function(question.number, new, old) == True:
print "Replacing '%r' which is user indicated similar to '%r' on question %r" % (old, new, question.number)
__similarMap(question, old, new)
maybe_new.remove(new)
#if we find a hit its done
break
else:
print "No more old choices, others must be new"
for ch in maybe_new:
# otherwise we create a new entry
print "Create new '%s'" %(ch)
try:
choice = Choice(question=question, sortid=indexes_map[ch], text_en=ch, value=ch)
log += '\n%s - Choice created %s ' % (row, choice)
if not debug:
choice.save()
#_choices_array_aux.append(ch)
log += '\n%s - Choice saved %s ' % (row, choice)
except:
log += "\n%s - Error to save Choice %s" % (row, choice)
self.writeLog(log)
raise
if len(old_choices)> 0:
print "REMOVED:"
print old_choices
# at last, we must remove the choices that dont appear in the new listing (considered removed)
Choice.objects.filter(question=question, value__in=old_choices).delete()
if mode==self.SIMILARITY_MODE:
self.__handleAnswerChanges(question, change_map, debug=debug)
return list_aux #_choices_array_aux
def __processDisposition(self, disposition):
if disposition == 'horizontal':
return 1
elif disposition == 'dropdown':
return 2
return 0
def __handleQuestionNumber(self, level, qNumber, questionset):
questionNumber = None
if level.startswith('h'):
questionNumber = qNumber.getNumber(level)
questionNumber = self.format_number(str(questionNumber))
else:
questionNumber = level
pos = level.split('.')
poslen = len(pos)
for question in questionset.questions():
this_q = question.number.split('.')
if poslen == len(this_q):
if pos[poslen-1] <= this_q[poslen-1]:
n = int(this_q[poslen-1])+1
if n < 10:
this_q[poslen-1] = '0'+str(n)
else:
this_q[poslen-1] = str(n)
question.number = ".".join(this_q)
question.save()
#raise Exception('STOP THERE')
return questionNumber
def __getChoices(self, question):
        ''' Gets the choices of a question back into a choices array.
            Useful on merge operations that point dependencies to questions already in the database
'''
if question.type in ['choice', 'choice-freeform', 'choice-multiple', 'choice-multiple-freeform']:
            choices = Choice.objects.filter(question=question).values_list('value', flat=True)
return choices
elif question.type in ['choice-yesno', 'choice-yesnodontknow']:
return ['yes', 'no', 'dontknow']
return []
def __handleQuestion(self, type, row,type_Column, level_number_column, text_question_Column, _questions_rows,
_choices_array, qNumber, questionset, log, _checks, _debug, questionnaire, mode=EXACT_MATCH, percentage=0.75, infer_function=None):
try:
slug = None
text_en = None
if level_number_column.value.startswith('h'):
text_en = str(level_number_column.value) + '. ' + str(text_question_Column.value)
else:
level = len(level_number_column.value.split('.'))-1
text_en = 'h%s. %s' % (str(level),str(text_question_Column.value))
dataType_column = None
if type == self.CATEGORY:
dataType_column = CommentPlaceholder()
else:
dataType_column = row[3]
if row[7].value:
slug = row[7].value
else:
slug = convert_text_to_slug(str(row[1].value)[:50])
slug = self.get_slug(slug, questionnaire)
if row[5].value:
helpText = row[5].value
else:
helpText = ''
_tooltip = False
if row[6].value:
if str(row[6].value).lower() == 'yes':
_tooltip = True
#If has dependencies
if row[8].value:
try:
dependencies_list = row[8]
list_dep_aux = dependencies_list.value.split('|')
question_num_parent = None
try:
question_num_parent = _questions_rows.get(list_dep_aux[0]).number
except AttributeError:
''' If this is a merge, the dependant question can already be on the questionset,
lets try looking for it
'''
try:
question = Question.objects.get(slug_fk__slug1=list_dep_aux[0],
questionset=questionset)
_questions_rows[list_dep_aux[0]] = question_num_parent
question_num_parent = question.number
_choices_array[list_dep_aux[0]] = self.__getChoices(question)
except Question.DoesNotExist:
raise Exception('The dependant with slug %s does not exist.' %(list_dep_aux[0]))
index_aux = int(str(list_dep_aux[1]))-1
choice_parent_list = _choices_array.get(list_dep_aux[0])
choice_parent = choice_parent_list[index_aux]
_checks = 'dependent=\"%s,%s\"' % (str(question_num_parent), str(choice_parent))
except:
raise
try:
questionNumber = self.__handleQuestionNumber(level_number_column.value, qNumber, questionset)
except:
if type==self.QUESTION:
log += "\n%s - Error to create question number %s" % (type_Column.row, text_en)
elif type==self.CATEGORY:
log += "\n%s - Error to create Category number %s" % (type_Column.row, text_en)
self.writeLog(log)
raise
#print slug
#Create or load slug
slugs = Slugs.objects.filter(slug1=slug, description=text_en)
if len(slugs) <= 0:
slug_db = Slugs(slug1=slug, description=text_en)
slug_db.save()
else:
slug_db = slugs[0]
visible_default = False
if row[10].value:
if str(row[10].value).lower() == 'visible':
visible_default = True
is_category=None
is_stats=None
if type==self.QUESTION:
is_stats=True
is_category=False
elif type==self.CATEGORY:
is_stats=False
is_category=True
try:
question = Question.objects.get(slug_fk__slug1=slug_db.slug1, questionset=questionset)
question.text_en=text_en
question.number=str(questionNumber)
question.type=dataType_column.value
question.help_text=helpText
question.stats=is_stats
question.category=is_category
question.tooltip=_tooltip
question.checks=_checks
question.visible_default=visible_default
except Question.DoesNotExist:
question = Question(questionset=questionset, text_en=text_en, number=str(questionNumber),
type=dataType_column.value, help_text=helpText, slug=slug, slug_fk=slug_db, stats=is_stats,
category=is_category, tooltip=_tooltip,
checks=_checks, visible_default=visible_default,
disposition=self.__processDisposition(row[11].value.lower()))
if dataType_column.value in ['open-validated']:
ardict = {}
if row[4].value:
# some basic types dont need regex
known_validations = {
"integer": "[+-]?\d+",
"decimal": "[+-]?\d*([.]\d*)?",
"scientific": "[+-]?\d*([.]\d*)?e[+-]?\d*([.]\d*)?",
"range": "[+\-]?\d*([.]\d*);[+\-]?\d*([.]\d*)",
"date": "\d{2}/\d{2}/\d{4}",
"time": "\d{2}:\d{2}:\d{2}",
"datetime": "\d{2}/\d{2}/\d{4} \d{2}:\d{2}:\d{2}",
"text": ".*"
}
try:
ardict['regex'] = known_validations[row[4].value]
ardict['base'] = row[4].value
except KeyError:
# If this is not known, try to validate it as a regex
try:
re.compile(row[4].value)
ardict['regex'] = row[4].value
except re.error:
raise Exception("--ERROR: The regex on row %d, column 4 is not valid" % (type_Column.row))
if row[5].value:
split = row[5].value.split('|')
lensplit = len(split)
if lensplit == 1:
ardict['unit'] = split[0]
question.help_text=""
elif lensplit == 2:
ardict['unit'] = split[0]
ardict['unit_desc'] = split[1]
question.help_text=""
elif lensplit == 3:
ardict['unit'] = split[0]
ardict['unit_desc'] = split[1]
question.help_text = split[2]
else:
raise Exception("-- ERROR: Invalid number of segments on help text row %d, column 5. Max syntax is unit|desc|help_text" % (type_Column.row))
question.metadata = json.dumps(ardict)
if not _debug:
question.save()
if type==self.QUESTION:
log += '\n%s - Question created %s ' % (type_Column.row, question)
elif type==self.CATEGORY:
log += '\n%s - Category created %s ' % (type_Column.row, question)
_questions_rows[slug] = question
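            # Only plain questions define answer choices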
if type == self.QUESTION:
if dataType_column.value in ['choice', 'choice-freeform', 'choice-multiple', 'choice-multiple-freeform']:
_choices_array_aux = []
                    # Parse the pipe-separated list of allowed values (column 5)
                    values_list = row[4]
                    if values_list is not None and values_list.value is not None:
list_aux = values_list.value.split('|')
_choices_array[slug] = self.__processChoices(type_Column.row, question, list_aux, log, debug=_debug,
mode=mode, match_percentage=percentage, infer_function=infer_function)
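                # Yes/no style questions get a fixed choice list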
if dataType_column.value in ['choice-yesno',
'choice-yesnodontknow']:
_choices_array[slug] = ['yes', 'no', 'dontknow']
except:
            log += "\n%s - Error saving question %s" % (type_Column.row, text_en)
self.writeLog(log)
raise
@transaction.commit_on_success
def import_questionnaire(self, merge=None, mode=EXACT_MATCH, percentage=0.75, infer_function=None):
_debug = False
qNumber = QuestionNumber()
slugs = []
wb = load_workbook(filename = self.file_path, data_only=True)
ws = wb.get_active_sheet()
log = ''
# Cell B1: Name of questionnaire
        name = ws.cell('B1').value
        slugQ = convert_text_to_slug(name)
disable = False
questionnaire = None
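        # When merging, reuse the existing questionnaire; otherwise create a new one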
        if merge is not None:
try:
questionnaire = Questionnaire.objects.get(id=merge)
except Questionnaire.DoesNotExist:
                raise Exception('Questionnaire does not exist, so cannot merge against it.')
else:
questionnaire = Questionnaire(name=name, disable=disable, slug=slugQ, redirect_url='/')
log += '\nQuestionnaire created %s ' % questionnaire
if not _debug:
questionnaire.save()
log += '\nQuestionnaire saved %s ' % questionnaire
try:
_choices_array = {}
_questions_rows = {}
#############################
# TIPS:
# Type of Row: QuestionSet, Category, Question
# Columns: Type, Text/Question, Level/Number, Data type, Value list, Help text/Description, Tooltip, Slug, Stats
#############################
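        # The code also reads optional columns beyond the list above:
        # column 9 (Dependencies), column 11 (default visibility) and column 12 (Disposition).
        # Data rows start at the third row of the sheet.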
for row in ws.rows[2:]:
            if len(row) > 0 and row[0].value is not None:
type_Column = row[0]
text_question_Column = row[1]
if (text_question_Column.value!=None):
text_question_Column.value = text_question_Column.value.encode('ascii', 'ignore')
level_number_column = row[2]
_checks = ''
# Type = QUESTIONSET
# Columns required: Type, Text/Question
# Columns optional: Help text/Description, Tooltip
if str(type_Column.value) == "QuestionSet":
sortid = str(level_number_column.value)
try:
qNumber.getNumber('h0', sortid)
except:
self.writeLog(log)
raise
text_en = 'h1. %s' % text_question_Column.value
slug_qs = None
if row[7].value:
slug_qs = row[7].value
else:
slug_qs = str(slugQ) + "_" + convert_text_to_slug(str(text_question_Column.value))
if row[5].value:
helpText = row[5].value
else:
helpText = ""
tooltip = False
if row[6].value:
if str(row[6].value).lower() == 'yes':
tooltip = True
questionset = None
created = False
try:
questionset = QuestionSet.objects.get(questionnaire=questionnaire, sortid=sortid, heading=slug_qs)
except QuestionSet.DoesNotExist:
questionset = QuestionSet(questionnaire=questionnaire, sortid=sortid, heading=slug_qs, checks='required', text_en=text_en, help_text=helpText, tooltip=tooltip)
created=True
if created:
log += '\n%s - QuestionSet created %s - %s ' % (type_Column.row, sortid, text_en)
else:
log += '\n%s - QuestionSet retrieved %s - %s ' % (type_Column.row, sortid, text_en)
try:
if not _debug:
questionset.save()
log += '\n%s - QuestionSet saved %s - %s ' % (type_Column.row, sortid, text_en)
except:
                        log += "\n%s - Error saving questionset %s - %s" % (type_Column.row, sortid, text_en)
self.writeLog(log)
raise
#if not created:
# last_question = Question.objects.filter(questionset=questionset).order_by('-id')[0]
# qNumber.setState(last_question.number)
# Type = CATEGORY
# Columns required: Type, Text/Question, Level/Number, Category
# Columns optional: Help text/Description, Slug, Tooltip, Dependencies
elif str(type_Column.value) == "Category":
self.__handleQuestion(self.CATEGORY, row, type_Column, level_number_column, text_question_Column,
_questions_rows, _choices_array, qNumber, questionset, log, _checks, _debug,
questionnaire, mode=mode, percentage=percentage, infer_function=infer_function)
# Type = QUESTION
# Columns required: Type, Text/Question, Level/Number, Data Type, Category, Stats
# Columns optional: Value List, Help text/Description, Tooltip, Dependencies
else:
self.__handleQuestion(self.QUESTION, row, type_Column, level_number_column, text_question_Column,
_questions_rows, _choices_array, qNumber, questionset, log, _checks, _debug,
questionnaire, mode=mode, percentage=percentage, infer_function=infer_function)
except:
            log += '\nError saving questionsets and questions of the questionnaire %s ' % questionnaire
self.writeLog(log)
raise
        log += '\nQuestionnaire %s, questionsets, questions and choices created successfully! ' % questionnaire
self.writeLog(log)
return True
| bioinformatics-ua/catalogue | emif/questionnaire/imports.py | Python | gpl-3.0 | 26,425 |