import numpy as np
def correct_to_01(X, epsilon=1.0e-10):
X[np.logical_and(X < 0, X >= 0 - epsilon)] = 0
X[np.logical_and(X > 1, X <= 1 + epsilon)] = 1
return X
def _shape_mixed(x, A=5.0, alpha=1.0):
aux = 2.0 * A * np.pi
ret = np.power(1.0 - x - (np.cos(aux * x + 0.5 * np.pi) / aux), alpha)
return correct_to_01(ret)
def _calculate(x, s, h):
return x[:, -1][:, None] + s * np.column_stack(h)
# variables
n_obj = 3
n_var = 8
# y must be 2-D (one row per sample) so that y[:, -1] works below
y = np.array([[0.1, 0.2, 0.3, 0.4, -0.1, -0.2, -0.3, -0.4]])
s = np.arange(2, 2 * n_obj + 1, 2)
h = np.array([1, 2, 3])
print(y, s, h)
print(y[:, -1][:, None])
print(_calculate(y, s, h))
|
import numpy as np
import torch
from mlprogram.actions import (
ActionSequence,
ApplyRule,
CloseVariadicFieldRule,
ExpandTreeRule,
GenerateToken,
NodeConstraint,
NodeType,
)
from mlprogram.encoders import ActionSequenceEncoder, Samples
from mlprogram.languages import Token
class TestEncoder(object):
def test_reserved_labels(self):
encoder = ActionSequenceEncoder(Samples([], [], []), 0)
assert 2 == len(encoder._rule_encoder.vocab)
assert 1 == len(encoder._token_encoder.vocab)
def test_encode_raw_value(self):
encoder = ActionSequenceEncoder(
Samples([], [],
[("", "foo"), ("x", "foo")]),
0)
assert [1, 2] == encoder.encode_raw_value("foo")
assert [0] == encoder.encode_raw_value("bar")
def test_encode_action(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, True)),
("arg0",
NodeType("value", NodeConstraint.Token, True)),
("arg1",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, True)],
[("", "f"), ("", "2")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(GenerateToken("", "2"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action = encoder.encode_action(action_sequence,
[Token("", "1", "1"),
Token("", "2", "2")])
assert np.array_equal(
[
[-1, 2, -1, -1],
[2, -1, 1, -1],
[2, -1, -1, 0],
[2, -1, 2, 1],
[2, 1, -1, -1],
[3, -1, -1, -1]
],
action.numpy()
)
def test_encode_parent(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, True)),
("arg0",
NodeType("value", NodeConstraint.Token, True)),
("arg1",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, False)],
[("", "f"), ("", "2")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(GenerateToken("", "2"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
parent = encoder.encode_parent(action_sequence)
assert np.array_equal(
[
[-1, -1, -1, -1],
[1, 2, 0, 0],
[1, 2, 0, 0],
[1, 2, 0, 0],
[1, 2, 0, 0],
[1, 2, 0, 1]
],
parent.numpy()
)
def test_encode_tree(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, True)),
("arg0",
NodeType("value", NodeConstraint.Token, True)),
("arg1",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, False)],
[("", "f"), ("", "2")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
d, m = encoder.encode_tree(action_sequence)
assert np.array_equal(
[0, 1, 1], d.numpy()
)
assert np.array_equal(
[[0, 1, 1], [0, 0, 0], [0, 0, 0]],
m.numpy()
)
def test_encode_empty_sequence(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, False)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, False)),
("arg0",
NodeType("value", NodeConstraint.Token, False)),
("arg1",
NodeType("value", NodeConstraint.Token, False))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, False),
NodeType("expr", NodeConstraint.Node, False)],
[("", "f")]),
0)
action_sequence = ActionSequence()
action = encoder.encode_action(action_sequence, [Token("", "1", "1")])
parent = encoder.encode_parent(action_sequence)
d, m = encoder.encode_tree(action_sequence)
assert np.array_equal(
[
[-1, -1, -1, -1]
],
action.numpy()
)
assert np.array_equal(
[
[-1, -1, -1, -1]
],
parent.numpy()
)
assert np.array_equal(np.zeros((0,)), d.numpy())
assert np.array_equal(np.zeros((0, 0)), m.numpy())
def test_encode_invalid_sequence(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, False)),
("arg0",
NodeType("value", NodeConstraint.Token, True)),
("arg1",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, True)],
[("", "f")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
assert encoder.encode_action(action_sequence,
[Token("", "2", "2")]) is None
def test_encode_completed_sequence(self):
none = ExpandTreeRule(NodeType("value", NodeConstraint.Node, False),
[])
encoder = ActionSequenceEncoder(
Samples([none],
[NodeType("value", NodeConstraint.Node, False)],
[("", "f")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(none))
action = encoder.encode_action(action_sequence, [Token("", "1", "1")])
parent = encoder.encode_parent(action_sequence)
assert np.array_equal(
[
[-1, 2, -1, -1],
[-1, -1, -1, -1]
],
action.numpy()
)
assert np.array_equal(
[
[-1, -1, -1, -1],
[-1, -1, -1, -1]
],
parent.numpy()
)
def test_decode(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, True)),
("arg0",
NodeType("value", NodeConstraint.Token, True)),
("arg1",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, False)],
[("", "f")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
expected_action_sequence = ActionSequence()
expected_action_sequence.eval(ApplyRule(funcdef))
expected_action_sequence.eval(GenerateToken("", "f"))
expected_action_sequence.eval(GenerateToken("", "1"))
expected_action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
result = encoder.decode(encoder.encode_action(
action_sequence, [Token(None, "1", "1")])[: -1, 1:],
[Token(None, "1", "1")])
assert \
expected_action_sequence.action_sequence == result.action_sequence
def test_decode_invalid_tensor(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, False)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("op", NodeType("value", NodeConstraint.Token, False)),
("arg0",
NodeType("value", NodeConstraint.Token, False)),
("arg1",
NodeType("value", NodeConstraint.Token, False))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, False),
NodeType("expr", NodeConstraint.Node, False)],
[("", "f")]),
0)
assert encoder.decode(torch.LongTensor([[-1, -1, -1]]), []) is None
assert encoder.decode(torch.LongTensor([[-1, -1, 1]]), []) is None
def test_encode_each_action(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("constant",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, False),
NodeType("expr", NodeConstraint.Node, True)],
[("", "f"), ("", "2")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(GenerateToken("", "2"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action_sequence.eval(ApplyRule(expr))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action = encoder.encode_each_action(
action_sequence,
[Token("", "1", "1"), Token("", "2", "2")],
1)
assert np.array_equal(
np.array([
[[1, -1, -1], [2, -1, -1]], # funcdef
[[-1, -1, -1], [-1, 1, -1]], # f
[[-1, -1, -1], [-1, -1, 0]], # 1
[[-1, -1, -1], [-1, 2, 1]], # 2
[[-1, -1, -1], [-1, -1, -1]], # CloseVariadicField
[[3, -1, -1], [2, -1, -1]], # expr
[[-1, -1, -1], [-1, 1, -1]], # f
[[-1, -1, -1], [-1, -1, -1]], # CloseVariadicField
[[-1, -1, -1], [-1, -1, -1]] # CloseVariadicField
], dtype=np.int64),
action.numpy()
)
def test_encode_path(self):
funcdef = ExpandTreeRule(
NodeType("def", NodeConstraint.Node, False),
[("name",
NodeType("value", NodeConstraint.Token, True)),
("body",
NodeType("expr", NodeConstraint.Node, True))])
expr = ExpandTreeRule(
NodeType("expr", NodeConstraint.Node, False),
[("constant",
NodeType("value", NodeConstraint.Token, True))])
encoder = ActionSequenceEncoder(
Samples([funcdef, expr],
[NodeType("def", NodeConstraint.Node, False),
NodeType("value", NodeConstraint.Token, True),
NodeType("expr", NodeConstraint.Node, True)],
[("", "f"), ("", "2")]),
0)
action_sequence = ActionSequence()
action_sequence.eval(ApplyRule(funcdef))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(GenerateToken("", "1"))
action_sequence.eval(GenerateToken("", "2"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action_sequence.eval(ApplyRule(expr))
action_sequence.eval(GenerateToken("", "f"))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
action_sequence.eval(ApplyRule(CloseVariadicFieldRule()))
path = encoder.encode_path(action_sequence, 2)
assert np.array_equal(
np.array([
[-1, -1], # funcdef
[2, -1], # f
[2, -1], # 1
[2, -1], # 2
[2, -1], # CloseVariadicField
[2, -1], # expr
[3, 2], # f
[3, 2], # CloseVariadicField
[2, -1], # CloseVariadicField
], dtype=np.int64),
path.numpy()
)
path = encoder.encode_path(action_sequence, 1)
assert np.array_equal(
np.array([
[-1], # funcdef
[2], # f
[2], # 1
[2], # 2
[2], # CloseVariadicField
[2], # expr
[3], # f
[3], # CloseVariadicField
[2], # CloseVariadicField
], dtype=np.int64),
path.numpy()
)
|
import sys
depth = int(input("What is the well's depth? "))
jump = int(input("Enter the height the frog can jump up: "))
slip = int(input("Enter the height the frog slips down: "))
step = depth
day = 1
if jump - slip <= 0 and jump < depth:
    print("The frog will never escape from the well.")
    sys.exit()
while step > 0:
    step = step - jump
    if step <= 0:
        break
    print(f"On day {day} the frog leaps to the depth of {step} meters.")
    step = step + slip
    print(f"At night he slips down to the depth of {step} meters.")
    day += 1
print(f"The frog gets out of the well on day {day}.")
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyIncremental(PythonPackage):
"""A small library that versions your Python projects."""
homepage = "https://github.com/twisted/incremental"
pypi = "incremental/incremental-21.3.0.tar.gz"
version('21.3.0', sha256='02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57')
depends_on('py-setuptools', type='build')
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 3 08:50:32 2019
@author: Thiago
"""
import numpy as np
import pylab as pl
#%%
#
# Parametric model
#
def profit_(P, L, R, C, H):
return L*R*P - (H+L*C)
#%%
#
# Generation of random inputs
#
# number of iterations
N = 10**5
P = np.random.uniform(47, 53, size=N)
L = np.random.uniform(1200, 1800, size=N)
R = np.random.uniform(0.01, 0.05, size=N)
C = np.random.uniform(0.2, 0.5, size=N)
H = 800
#%%
#
# Model evaluation
#
profit = profit_(P, L, R, C, H)
#%%
#
# Monte Carlo loop
#
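# The original left this section empty; as a minimal sketch (an assumption, equivalent
# to the vectorized evaluation above), an explicit Monte Carlo loop could look like this:
profit_loop = np.empty(N)
for i in range(N):
    profit_loop[i] = profit_(P[i], L[i], R[i], C[i], H)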
#%%
#
# Analysis of the results
#
pl.hist(profit, bins=100)
pl.axvline(0, c='r',lw=3)
pl.axvline(2200, c='g',lw=3)
profit_mean = profit.mean()
pl.axvline(profit_mean, c='y',lw=3)
#profit_cum = np.cumsum(pl.histogram(profit, bins=100)[0])
#pl.plot(profit_cum, c='k')
pl.show()
print(u'Mean profit = '+str(profit_mean))
print(u'Probability of loss = '+str(sum(profit < 0)/N))
print(u'Prob. profit > 2200 = '+str(sum(profit > 2200)/N))
|
import torch
import torch.nn as nn
import torch.nn.functional as func
class Flatten(nn.Module):
def forward(self, x):
"""Flatten non-batch dimensions."""
return x.view(x.shape[0], -1)
class FullyConnected(nn.Module):
def __init__(self, num_features, hidden_size, dropout):
super(FullyConnected, self).__init__()
self.flatten = Flatten()
self.fc = nn.Linear(num_features, hidden_size)
self.dropout = nn.Dropout(dropout)
self.output = nn.Linear(hidden_size, 1)
def forward(self, x):
x = self.flatten(x)
x = func.relu(self.fc(x))
x = self.dropout(x)
# a softmax over a single output unit is constant (always 1.0); use a sigmoid for a scalar probability
x = torch.sigmoid(self.output(x))
return x
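# A minimal usage sketch (not part of the original file): the batch size, input shape,
# and hyperparameters below are illustrative assumptions.
if __name__ == "__main__":
    import torch
    model = FullyConnected(num_features=16, hidden_size=8, dropout=0.5)
    dummy = torch.randn(4, 2, 8)  # Flatten reshapes this to (4, 16)
    probs = model(dummy)          # shape (4, 1), values in [0, 1]
    print(probs.shape)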
|
from swarm import Swarm
from status_types import Status
|
# -*- coding: UTF-8 -*-
from django.db import connection
from django.utils.deprecation import MiddlewareMixin
class AuditMiddleware(MiddlewareMixin):
"""
Has to be placed after Django AuthMiddleware.
"""
def process_request(self, request):
cursor = connection.cursor()
# alternative https://www.postgresql.org/docs/9.6/functions-admin.html
cursor.execute('CREATE TEMPORARY TABLE IF NOT EXISTS _user_tmp (user_id integer);')
cursor.execute('INSERT INTO "_user_tmp" VALUES (%s);', [request.user.id or None])
def process_response(self, request, response):
cursor = connection.cursor()
cursor.execute('DROP TABLE IF EXISTS _user_tmp;')
return response
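# A minimal sketch (assumption: a typical settings.py) of the ordering the docstring
# requires, with this middleware listed after Django's AuthenticationMiddleware. The
# dotted path "myproject.middleware.AuditMiddleware" is a hypothetical location.
#
# MIDDLEWARE = [
#     "django.contrib.sessions.middleware.SessionMiddleware",
#     "django.contrib.auth.middleware.AuthenticationMiddleware",
#     "myproject.middleware.AuditMiddleware",
# ]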
|
""" Test chemistry attributes
:Author: Jonathan Karr <[email protected]>
:Date: 2017-05-10
:Copyright: 2017, Karr Lab
:License: MIT
"""
from obj_tables import core
from wc_utils.util import chem
import bcforms
import bpforms
import lark.exceptions
import obj_tables.chem
import openbabel
import unittest
class ChemicalFormulaAttributeTestCase(unittest.TestCase):
def test(self):
attr = obj_tables.chem.ChemicalFormulaAttribute()
primary_attr = obj_tables.chem.ChemicalFormulaAttribute(primary=True, unique=True)
self.assertEqual(attr.default, None)
attr = obj_tables.chem.ChemicalFormulaAttribute(default='C1H1O2')
self.assertEqual(attr.default, chem.EmpiricalFormula('C1H1O2'))
attr = obj_tables.chem.ChemicalFormulaAttribute(default=chem.EmpiricalFormula('C1H1O2'))
self.assertEqual(attr.default, chem.EmpiricalFormula('C1H1O2'))
class Node(core.Model):
value = obj_tables.chem.ChemicalFormulaAttribute()
attr = Node.Meta.attributes['value']
# deserialize
self.assertEqual(attr.deserialize(''), (None, None))
self.assertEqual(attr.deserialize(None), (None, None))
self.assertEqual(attr.deserialize('X'), (chem.EmpiricalFormula('X'), None))
self.assertEqual(attr.deserialize('x')[0], None)
self.assertNotEqual(attr.deserialize('x')[1], None)
# serialize
self.assertEqual(attr.serialize(''), '')
self.assertEqual(attr.serialize(None), '')
self.assertEqual(attr.serialize(chem.EmpiricalFormula('C1HO2')), 'CHO2')
# deserialize + serialize
self.assertEqual(attr.serialize(attr.deserialize('')[0]), '')
self.assertEqual(attr.serialize(attr.deserialize(None)[0]), '')
self.assertEqual(attr.serialize(attr.deserialize('CHO2')[0]), 'CHO2')
# validate
node = Node()
self.assertEqual(attr.validate(node, None), None)
self.assertEqual(attr.validate(node, chem.EmpiricalFormula('C1HO2')), None)
self.assertNotEqual(attr.validate(node, ''), None)
self.assertNotEqual(attr.validate(node, 'x'), None)
self.assertNotEqual(attr.validate(node, 1), None)
attr2 = obj_tables.chem.ChemicalFormulaAttribute(primary=True)
self.assertEqual(attr.validate(None, None), None)
self.assertEqual(attr.validate(None, chem.EmpiricalFormula('C')), None)
self.assertNotEqual(attr2.validate(None, None), None)
self.assertEqual(attr2.validate(None, chem.EmpiricalFormula('C')), None)
# validate_unique
nodes = [Node(), Node()]
self.assertEqual(attr.validate_unique(nodes, [chem.EmpiricalFormula('CHO2'), chem.EmpiricalFormula('C2HO2')]), None)
self.assertNotEqual(attr.validate_unique(nodes, [chem.EmpiricalFormula('CHO2'), chem.EmpiricalFormula('C1HO2')]), None)
# to/from JSON
self.assertEqual(attr.to_builtin(None), None)
self.assertEqual(attr.to_builtin(''), None)
self.assertEqual(attr.to_builtin(chem.EmpiricalFormula('CHO2')), {'C': 1, 'H': 1, 'O': 2})
self.assertEqual(attr.to_builtin(chem.EmpiricalFormula('C1HO2')), {'C': 1, 'H': 1, 'O': 2})
self.assertEqual(attr.from_builtin(None), None)
self.assertEqual(attr.from_builtin(''), None)
self.assertEqual(attr.from_builtin('CHO2'), chem.EmpiricalFormula('CHO2'))
self.assertEqual(attr.from_builtin('C1HO2'), chem.EmpiricalFormula('CHO2'))
self.assertEqual(attr.from_builtin({'C': 1, 'H': 1, 'O': 2}), chem.EmpiricalFormula('CHO2'))
self.assertEqual(attr.from_builtin({'C': 1, 'H': 1, 'O': 2}), chem.EmpiricalFormula('C1HO2'))
# get_xlsx_validation
attr.get_xlsx_validation()
primary_attr.get_xlsx_validation()
class ChemicalStructureTestCase(unittest.TestCase):
def test__init__None(self):
s = obj_tables.chem.ChemicalStructure()
self.assertEqual(s._value, None)
self.assertEqual(s._serialized_format, None)
self.assertEqual(s._serialized_value, None)
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_format, None)
self.assertEqual(s.serialized_value, None)
def test__init__str(self):
serialized_value = 'O'
s = obj_tables.chem.ChemicalStructure('{}:{}'.format('smiles', serialized_value))
conv = openbabel.OBConversion()
conv.SetOutFormat('smiles')
conv.SetOptions('c', conv.OUTOPTIONS)
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.serialized_value, serialized_value)
def test__init__openbabel(self):
mol = openbabel.OBMol()
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.serialized_value, None)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.serialized_value, None)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.inchi)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.inchi)
self.assertEqual(s.serialized_value, None)
with self.assertRaisesRegex(ValueError, 'must be consistent with `value'):
obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.bpforms)
def test__init__bpforms(self):
mol = bpforms.DnaForm()
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bpforms)
self.assertEqual(s.serialized_value, None)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.bpforms)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bpforms)
self.assertEqual(s.serialized_value, None)
mol2 = bpforms.BpForm()
with self.assertRaisesRegex(ValueError, 'BpForms must use one of the defined alphabets'):
obj_tables.chem.ChemicalStructure(mol2)
with self.assertRaisesRegex(ValueError, 'must be consistent with `value`'):
obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.bcforms)
def test__init__bcforms(self):
mol = bcforms.BcForm()
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bcforms)
self.assertEqual(s.serialized_value, None)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.bcforms)
self.assertEqual(s.value, mol)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bcforms)
self.assertEqual(s.serialized_value, None)
with self.assertRaisesRegex(ValueError, 'must be consistent with `value`'):
obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.bpforms)
def test__init__unsupported_type(self):
with self.assertRaisesRegex(ValueError, 'Unable to set `value`'):
obj_tables.chem.ChemicalStructure(1)
def test__init__inconsistent_format(self):
with self.assertRaisesRegex(ValueError, 'must be consistent with `value`'):
obj_tables.chem.ChemicalStructure(None, obj_tables.chem.ChemicalStructureFormat.inchi)
def test_set_value(self):
s = obj_tables.chem.ChemicalStructure()
s.value = openbabel.OBMol()
s.value = bpforms.DnaForm()
s.value = openbabel.OBMol()
s.value = bcforms.BcForm()
def test_to_dict_None(self):
s = obj_tables.chem.ChemicalStructure()
self.assertEqual(s.to_dict(), {
'format': None,
'value': None,
})
def test_to_dict_openbabel(self):
mol = openbabel.OBMol()
conv = openbabel.OBConversion()
conv.SetInFormat('smi')
conv.ReadString(mol, 'O')
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.to_dict(), {
'format': 'smiles',
'value': 'O',
})
s.serialized_format = obj_tables.chem.ChemicalStructureFormat.inchi
self.assertEqual(s.to_dict(), {
'format': 'inchi',
'value': 'InChI=1S/H2O/h1H2',
})
s._serialized_value = 'XXX'
self.assertEqual(s.to_dict(), {
'format': 'inchi',
'value': 'XXX',
})
def test_to_dict_bpforms(self):
seq = 'ACGT'
mol = bpforms.DnaForm().from_str(seq)
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.to_dict(), {
'format': 'bpforms/dna',
'value': seq,
})
self.assertEqual(s.to_dict(), {
'format': 'bpforms/dna',
'value': seq,
})
def test_to_dict_bcforms(self):
serialized_value = '2 * a + 3 * b'
mol = bcforms.BcForm().from_str(serialized_value)
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.to_dict(), {
'format': 'bcforms',
'value': serialized_value,
})
def test_serialize_openbabel(self):
serialized_value = 'InChI=1S/H2O/h1H2'
mol = openbabel.OBMol()
conv = openbabel.OBConversion()
conv.SetInFormat('inchi')
conv.ReadString(mol, serialized_value)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.inchi)
self.assertEqual(s.serialize(), '{}: {}'.format('inchi', serialized_value))
def test_serialize_bpforms(self):
seq = 'ACGU'
mol = bpforms.RnaForm().from_str(seq)
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.serialize(), '{}/{}: {}'.format('bpforms', 'rna', seq))
def test_serialize_bcforms(self):
serialized_value = '2 * a + 3 * b'
mol = bcforms.BcForm().from_str(serialized_value)
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(s.serialize(), '{}: {}'.format('bcforms', serialized_value))
def test_from_dict_none(self):
s = obj_tables.chem.ChemicalStructure()
s.from_dict({})
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_value, None)
self.assertEqual(s.serialized_format, None)
s.from_dict({'format': None})
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_value, None)
self.assertEqual(s.serialized_format, None)
s.from_dict({'value': None})
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_value, None)
self.assertEqual(s.serialized_format, None)
s.from_dict({'format': None, 'value': None})
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_value, None)
self.assertEqual(s.serialized_format, None)
s.from_dict({'format': None, 'value': ''})
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_value, None)
self.assertEqual(s.serialized_format, None)
with self.assertRaisesRegex(ValueError, 'key must be defined'):
s.from_dict({'value': 'O'})
def test_from_dict_openbabel(self):
serialized_value = 'O'
s = obj_tables.chem.ChemicalStructure()
s.from_dict({'format': 'smiles', 'value': serialized_value})
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
conv = openbabel.OBConversion()
conv.SetOutFormat('smiles')
conv.SetOptions('c', conv.OUTOPTIONS)
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
serialized_value = 'InChI=1S/H2O/h1H2'
s = obj_tables.chem.ChemicalStructure()
s.from_dict({'format': 'inchi', 'value': serialized_value})
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.inchi)
conv = openbabel.OBConversion()
conv.SetOutFormat('inchi')
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
def test_from_dict_bpforms(self):
serialized_value = 'ACDE'
s = obj_tables.chem.ChemicalStructure()
s.from_dict({'format': 'bpforms/protein', 'value': serialized_value})
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bpforms)
self.assertEqual(s.value.alphabet, bpforms.protein_alphabet)
self.assertEqual(str(s.value), serialized_value)
def test_from_dict_bcforms(self):
serialized_value = '2 * a'
s = obj_tables.chem.ChemicalStructure()
s.from_dict({'format': 'bcforms', 'value': serialized_value})
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bcforms)
self.assertEqual(str(s.value), serialized_value)
def test_deserialize_none(self):
s = obj_tables.chem.ChemicalStructure()
s.deserialize(None)
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_format, None)
self.assertEqual(s.serialized_value, None)
s.deserialize('')
self.assertEqual(s.value, None)
self.assertEqual(s.serialized_format, None)
self.assertEqual(s.serialized_value, None)
def test_deserialize_openbabel(self):
serialized_value = 'O'
s = obj_tables.chem.ChemicalStructure()
s.deserialize('{}: {}'.format('smiles', serialized_value))
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
conv = openbabel.OBConversion()
conv.SetOutFormat('smiles')
conv.SetOptions('c', conv.OUTOPTIONS)
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
serialized_value = 'InChI=1S/H2O/h1H2'
s = obj_tables.chem.ChemicalStructure()
s.deserialize('{}: {}'.format('inchi', serialized_value))
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.inchi)
conv = openbabel.OBConversion()
conv.SetOutFormat('inchi')
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
def test_deserialize_bpforms(self):
serialized_value = 'ACDE'
s = obj_tables.chem.ChemicalStructure()
s.deserialize('{}/{}: {}'.format('bpforms', 'protein', serialized_value))
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bpforms)
self.assertEqual(s.value.alphabet, bpforms.protein_alphabet)
self.assertEqual(str(s.value), serialized_value)
def test_deserialize_bcforms(self):
serialized_value = '2 * a'
s = obj_tables.chem.ChemicalStructure()
s.deserialize('{}: {}'.format('bcforms', serialized_value))
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.bcforms)
self.assertEqual(str(s.value), serialized_value)
class ChemicalStructureAttributeTestCase(unittest.TestCase):
def test__init__(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
primary_attr = obj_tables.chem.ChemicalStructureAttribute(primary=True, unique=True)
def test_deserialize(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
serialized_value = 'O'
return_value = attr.deserialize('{}: {}'.format('smiles', serialized_value))
self.assertEqual(return_value[0].serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(return_value[0].serialized_value, serialized_value)
conv = openbabel.OBConversion()
conv.SetOutFormat('smiles')
conv.SetOptions('c', conv.OUTOPTIONS)
self.assertEqual(conv.WriteString(return_value[0].value, True), serialized_value)
self.assertEqual(return_value[1], None)
self.assertEqual(attr.deserialize('O')[0], None)
self.assertNotEqual(attr.deserialize('O')[1], None)
self.assertEqual(attr.deserialize(''), (None, None))
self.assertEqual(attr.deserialize(None), (None, None))
self.assertEqual(attr.deserialize(1)[0], None)
self.assertNotEqual(attr.deserialize(1)[1], None)
def test_serialize(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
serialized_value = 'O'
mol = openbabel.OBMol()
conv = openbabel.OBConversion()
conv.SetInFormat('smiles')
conv.ReadString(mol, serialized_value)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(attr.serialize(s), '{}: {}'.format('smiles', serialized_value))
self.assertEqual(attr.serialize('',), '')
self.assertEqual(attr.serialize(None), '')
def test_validate(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
primary_attr = obj_tables.chem.ChemicalStructureAttribute(primary=True, unique=True)
mol = bpforms.DnaForm().from_str('AC')
s = obj_tables.chem.ChemicalStructure(mol)
self.assertEqual(attr.validate(None, s), None)
self.assertEqual(attr.validate(None, None), None)
self.assertNotEqual(attr.validate(None, ''), None)
self.assertNotEqual(attr.validate(None, 1), None)
self.assertNotEqual(primary_attr.validate(None, None), None)
def test_validate_unique(self):
attr = obj_tables.chem.ChemicalStructureAttribute(primary=True, unique=True)
mol1 = bpforms.DnaForm().from_str('AC')
mol2 = bpforms.DnaForm().from_str('GT')
mol3 = bpforms.DnaForm().from_str('AC')
s1 = obj_tables.chem.ChemicalStructure(mol1)
s2 = obj_tables.chem.ChemicalStructure(mol2)
s3 = obj_tables.chem.ChemicalStructure(mol3)
self.assertEqual(attr.validate_unique(None, [s1, s2]), None)
self.assertNotEqual(attr.validate_unique(None, [s1, s3]), None)
def test_to_builtin(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
serialized_value = 'O'
mol = openbabel.OBMol()
conv = openbabel.OBConversion()
conv.SetInFormat('smiles')
conv.ReadString(mol, serialized_value)
s = obj_tables.chem.ChemicalStructure(mol, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(attr.to_builtin(s), {
'format': 'smiles',
'value': serialized_value,
})
self.assertEqual(attr.to_builtin(None), None)
self.assertEqual(attr.to_builtin(''), None)
def test_from_builtin(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
serialized_value = 'O'
s = attr.from_builtin({'format': 'smiles', 'value': serialized_value})
conv = openbabel.OBConversion()
conv.SetOutFormat('smiles')
conv.SetOptions('c', conv.OUTOPTIONS)
self.assertEqual(conv.WriteString(s.value, True), serialized_value)
self.assertEqual(s.serialized_format, obj_tables.chem.ChemicalStructureFormat.smiles)
self.assertEqual(s.serialized_value, serialized_value)
self.assertEqual(attr.from_builtin(None), None)
self.assertEqual(attr.from_builtin(''), None)
def test_get_xlsx_validation(self):
attr = obj_tables.chem.ChemicalStructureAttribute()
attr.get_xlsx_validation()
attr = obj_tables.chem.ChemicalStructureAttribute(primary=True, unique=True)
attr.get_xlsx_validation()
class ReactionEquationAttributeTestCase(unittest.TestCase):
def test_ReactionParticipant(self):
class Node(obj_tables.core.Model):
id = obj_tables.core.StringAttribute(unique=True, primary=True)
part = obj_tables.chem.ReactionParticipant('A', 'c', 1.)
part2 = obj_tables.chem.ReactionParticipant('A', 'c', 1.)
part3 = obj_tables.chem.ReactionParticipant(Node(id='A'), Node(id='c'), 1.)
part4 = obj_tables.chem.ReactionParticipant('B', 'c', 1.)
part5 = obj_tables.chem.ReactionParticipant('A', 'e', 1.)
part6 = obj_tables.chem.ReactionParticipant('A', 'c', 2.)
part7 = obj_tables.chem.ReactionParticipant('A', 'c', 2.2)
self.assertTrue(part.is_equal(part))
self.assertTrue(part.is_equal(part2))
self.assertFalse(part.is_equal(part3))
self.assertFalse(part.is_equal(part4))
self.assertFalse(part.is_equal(part5))
self.assertFalse(part.is_equal(part6))
self.assertFalse(part.is_equal(part7))
self.assertEqual(part.to_dict(), {
"species": "A",
"compartment": "c",
"stoichiometry": 1.,
})
self.assertEqual(part3.to_dict(), {
"species": "A",
"compartment": "c",
"stoichiometry": 1.,
})
self.assertEqual(part.serialize(include_compartment=False), 'A')
self.assertEqual(part.serialize(include_compartment=True), 'A[c]')
self.assertEqual(part3.serialize(include_compartment=False), 'A')
self.assertEqual(part3.serialize(include_compartment=True), 'A[c]')
self.assertEqual(part6.serialize(include_compartment=False), '(2) A')
self.assertEqual(part6.serialize(include_compartment=True), '(2) A[c]')
self.assertEqual(part7.serialize(include_compartment=False), '(2.2) A')
self.assertEqual(part7.serialize(include_compartment=True), '(2.2) A[c]')
def test_ReactionEquation(self):
class Node(obj_tables.core.Model):
id = obj_tables.core.StringAttribute(unique=True, primary=True)
species = {
'A': Node(id='A'),
'B': Node(id='B'),
'C': Node(id='C'),
}
compartments = {
'c': Node(id='c'),
'e': Node(id='e'),
}
rxn = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 1.),
])
rxn2 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 1.),
])
rxn3 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.)
])
rxn4 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 2.),
])
rxn5 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'e', 1.),
])
rxn6 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -3.3),
obj_tables.chem.ReactionParticipant('B', 'e', 2.),
])
rxn7 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant(species['A'], compartments['c'], -3.3),
obj_tables.chem.ReactionParticipant(species['B'], compartments['e'], 2.),
obj_tables.chem.ReactionParticipant(species['C'], compartments['e'], 1.7),
])
rxn8 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -3.3),
obj_tables.chem.ReactionParticipant('B', 'e', 2.),
obj_tables.chem.ReactionParticipant('C', 'e', 1.7),
])
rxn9 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -2.),
obj_tables.chem.ReactionParticipant('B', 'c', 4.),
])
self.assertTrue(rxn.is_equal(rxn))
self.assertTrue(rxn.is_equal(rxn2))
self.assertFalse(rxn.is_equal(None))
self.assertFalse(rxn.is_equal(rxn3))
self.assertFalse(rxn.is_equal(rxn4))
self.assertFalse(rxn.is_equal(rxn5))
self.assertFalse(rxn.is_equal(rxn6))
self.assertFalse(rxn.is_equal(rxn7))
self.assertFalse(rxn.is_equal(rxn8))
self.assertFalse(rxn.is_equal(rxn9))
self.assertEqual(rxn.to_dict(), [
{
"species": "A",
"compartment": "c",
"stoichiometry": -1.,
},
{
"species": "B",
"compartment": "c",
"stoichiometry": 1.,
},
])
self.assertEqual(rxn.serialize(), '[c]: A <=> B')
self.assertEqual(rxn5.serialize(), 'A[c] <=> B[e]')
self.assertEqual(rxn6.serialize(), '(3.3) A[c] <=> (2) B[e]')
self.assertEqual(rxn7.serialize(), '(3.3) A[c] <=> (1.7) C[e] + (2) B[e]')
self.assertEqual(rxn8.serialize(), '(3.3) A[c] <=> (1.7) C[e] + (2) B[e]')
self.assertTrue(obj_tables.chem.ReactionEquation().deserialize('[c]: A <=> B').is_equal(rxn))
self.assertTrue(obj_tables.chem.ReactionEquation().deserialize('[c]: (2) A <=> (4) B').is_equal(rxn9))
self.assertTrue(obj_tables.chem.ReactionEquation().deserialize('A[c] <=> B[e]').is_equal(rxn5))
self.assertTrue(obj_tables.chem.ReactionEquation().deserialize('(3.3) A[c] <=> (2) B[e]').is_equal(rxn6))
self.assertTrue(obj_tables.chem.ReactionEquation().deserialize('(3.3) A[c] <=> (1.7) C[e] + (2) B[e]').is_equal(rxn8))
rxn10 = obj_tables.chem.ReactionEquation().deserialize('(3.3) A[c] <=> (1.7) C[e] + (2) B[e]', species, compartments)
self.assertTrue(rxn10.is_equal(rxn7))
with self.assertRaisesRegex(lark.exceptions.VisitError, 'must be defined'):
obj_tables.chem.ReactionEquation().deserialize('(3.3) D[c] <=> (1.7) F[e] + (2) E[e]', species, compartments)
with self.assertRaisesRegex(lark.exceptions.VisitError, 'must be defined'):
obj_tables.chem.ReactionEquation().deserialize('(3.3) A[d] <=> (1.7) C[f] + (2) B[f]', species, compartments)
with self.assertRaisesRegex(lark.exceptions.VisitError, 'Reaction participants cannot be repeated'):
obj_tables.chem.ReactionEquation().deserialize('(3.3) A[c] + (3.3) A[c] <=> (1.7) C[e] + (2) B[e]')
def test_ReactionEquationAttribute(self):
class Species(obj_tables.core.Model):
id = obj_tables.core.StringAttribute(unique=True, primary=True)
class Compartment(obj_tables.core.Model):
id = obj_tables.core.StringAttribute(unique=True, primary=True)
attr = obj_tables.chem.ReactionEquationAttribute()
not_none_attr = obj_tables.chem.ReactionEquationAttribute(none=False, unique=True, description="")
rxn = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 1.),
])
rxn2 = obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 2.),
])
self.assertEqual(attr.validate(None, rxn), None)
self.assertEqual(attr.validate(None, None), None)
self.assertNotEqual(attr.validate(None, 1), None)
self.assertNotEqual(not_none_attr.validate(None, None), None)
self.assertEqual(attr.validate_unique(None, [rxn, rxn2]), None)
self.assertNotEqual(attr.validate_unique(None, [rxn, rxn]), None)
self.assertEqual(attr.serialize(None), '')
self.assertEqual(attr.serialize(rxn), '[c]: A <=> B')
self.assertEqual(attr.deserialize(None), (None, None))
self.assertEqual(attr.deserialize(''), (None, None))
self.assertTrue(attr.deserialize('[c]: A <=> B')[0].is_equal(
obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 1.),
]
)))
self.assertIsInstance(attr.deserialize('[c] A <=> B')[1], obj_tables.InvalidAttribute)
objects = {
Species: {
'A': Species(id='A'),
'B': Species(id='B'),
},
Compartment: {
'c': Compartment(id='c'),
}
}
obj_attr = obj_tables.chem.ReactionEquationAttribute(species_cls=Species, compartment_cls=Compartment)
rxn3, _ = obj_attr.deserialize('[c]: A <=> B', objects)
self.assertTrue(rxn3.is_equal(
obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant(objects[Species]['A'], objects[Compartment]['c'], -1.),
obj_tables.chem.ReactionParticipant(objects[Species]['B'], objects[Compartment]['c'], 1.),
])
))
obj_attr = obj_tables.chem.ReactionEquationAttribute(species_cls='Species', compartment_cls='Compartment')
rxn3, _ = obj_attr.deserialize('[c]: A <=> B', objects)
self.assertTrue(rxn3.is_equal(
obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant(objects[Species]['A'], objects[Compartment]['c'], -1.),
obj_tables.chem.ReactionParticipant(objects[Species]['B'], objects[Compartment]['c'], 1.),
])
))
obj_attr = obj_tables.chem.ReactionEquationAttribute(
species_cls=Species.__module__ + '.' + Species.__name__,
compartment_cls=Compartment.__module__ + '.' + Compartment.__name__)
rxn3, _ = obj_attr.deserialize('[c]: A <=> B', objects)
self.assertTrue(rxn3.is_equal(
obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant(objects[Species]['A'], objects[Compartment]['c'], -1.),
obj_tables.chem.ReactionParticipant(objects[Species]['B'], objects[Compartment]['c'], 1.),
])
))
obj_attr = obj_tables.chem.ReactionEquationAttribute(species_cls=Species, compartment_cls=Compartment)
rxn3, _ = obj_attr.deserialize('[c]: A <=> B')
self.assertTrue(rxn3.is_equal(
obj_tables.chem.ReactionEquation([
obj_tables.chem.ReactionParticipant('A', 'c', -1.),
obj_tables.chem.ReactionParticipant('B', 'c', 1.),
])
))
obj_attr = obj_tables.chem.ReactionEquationAttribute(species_cls='Species', compartment_cls='Comp')
with self.assertRaisesRegex(ValueError, 'Unable to resolve class'):
obj_attr.deserialize('[c]: A <=> B', objects=objects)
obj_attr = obj_tables.chem.ReactionEquationAttribute(species_cls='Spec', compartment_cls='Compartment')
with self.assertRaisesRegex(ValueError, 'Unable to resolve class'):
obj_attr.deserialize('[c]: A <=> B', objects=objects)
self.assertEqual(attr.to_builtin(None), None)
self.assertEqual(attr.to_builtin(rxn), [
{
"species": "A",
"compartment": "c",
"stoichiometry": -1.,
},
{
"species": "B",
"compartment": "c",
"stoichiometry": 1.,
},
])
with self.assertRaisesRegex(NotImplementedError, 'Cannot be converted from JSON'):
attr.from_builtin(None)
attr.get_xlsx_validation()
not_none_attr.get_xlsx_validation()
|
from typing import ClassVar, List, Optional
from ...constants import ApiKey, ResourceType
from ..base import RequestData
class Config:
name: str
config_operation: int
value: Optional[str]
def __init__(self, name: str, config_operation: int, value: Optional[str]):
"""
:param name: The configuration key name.
:type name: str
:param config_operation: The type (Set, Delete, Append, Subtract) of operation.
:type config_operation: int
:param value: The value to set for the configuration key.
:type value: Optional[str]
"""
self.name = name
self.config_operation = config_operation
self.value = value
class Resource:
resource_type: ResourceType
resource_name: str
configs: List[Config]
def __init__(self, resource_type: ResourceType, resource_name: str, configs: List[Config]):
"""
:param resource_type: The resource type.
:type resource_type: ResourceType
:param resource_name: The resource name.
:type resource_name: str
:param configs: The configurations.
:type configs: List[Config]
"""
self.resource_type = resource_type
self.resource_name = resource_name
self.configs = configs
class IncrementalAlterConfigsRequestData(RequestData):
resources: List[Resource]
validate_only: bool
api_key: ClassVar[ApiKey] = ApiKey.INCREMENTAL_ALTER_CONFIGS
def __init__(self, resources: List[Resource], validate_only: bool):
"""
:param resources: The incremental updates for each resource.
:type resources: List[Resource]
:param validate_only: True if we should validate the request, but not change the configurations.
:type validate_only: bool
"""
self.resources = resources
self.validate_only = validate_only
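# A minimal construction sketch (not part of the original module). The topic and
# config names are made up; `ResourceType.TOPIC` and operation code 0 ("set") are
# assumptions about the surrounding library's enum and the Kafka protocol.
if __name__ == "__main__":
    example_request = IncrementalAlterConfigsRequestData(
        resources=[
            Resource(
                resource_type=ResourceType.TOPIC,
                resource_name="example-topic",
                configs=[Config(name="retention.ms", config_operation=0, value="86400000")],
            )
        ],
        validate_only=True,
    )
    print(example_request.api_key)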
|
import random
import datetime
import time
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model
from sklearn.ensemble import RandomForestRegressor, BaggingRegressor, RandomForestClassifier
from sklearn.svm import SVC,SVR,LinearSVC
from sklearn.naive_bayes import GaussianNB
from sklearn import pipeline, grid_search  # note: in newer scikit-learn, GridSearchCV lives in sklearn.model_selection
from sklearn.decomposition import TruncatedSVD
from sklearn import decomposition
from sklearn import preprocessing
from sklearn import tree
# from sklearn.lda import LDA
from sklearn import neighbors
random.seed(1729)
start_time = time.time()
date = str(datetime.datetime.now().strftime(format='%m%d'))
print("::Start time- ", datetime.datetime.now())
train = pd.read_csv('Train.csv', encoding="ISO-8859-1")#[:1000]
test = pd.read_csv('Test.csv', encoding="ISO-8859-1")#[:1000]
train_count = train.shape[0]
train_test = pd.concat((train, test), axis=0,ignore_index=True)
train_test['Cust_status'] = train_test['Cust_status'].map(lambda x: 2 if x=='Old' else 1)
train_test['Trans24'] = train_test['Trans24'].map(lambda x: 2 if x == 'Enable' else 1)
train_test['Trans25'] = train_test['Trans25'].map(lambda x: 2 if x == 'Enable' else 1)
train_test['Trans26'] = train_test['Trans26'].map(lambda x: 2 if x == 'Enable' else 1)
train_test['Trans27'] = train_test['Trans27'].map(lambda x: 2 if x == 'Enable' else 1)
cols = list(train.columns.values)
cols.remove('Cust_id')
cols.remove('Active_Customer')
for i in cols:
train_test[i] = train_test[i].fillna(train_test[i].mean(axis=0))
trans = [col for col in cols if 'Trans' in col]
food = [col for col in cols if 'Food' in col]
promotion = [col for col in cols if 'Promotion' in col]
# per-row averages over each column group; the divisors are the group sizes used in the original
train_test['trans_avg'] = train_test[trans].sum(axis=1) / 41
train_test['food_avg'] = train_test[food].sum(axis=1) / 164
train_test['promotion_avg'] = train_test[promotion].sum(axis=1) / 48
train_test = train_test.drop(np.concatenate((trans, food, promotion),axis=0).ravel(), axis=1)
print(train_test.shape)
test_id = train_test['Cust_id'][train_count:]
Y_train = train_test['Active_Customer'][:train_count].values
train_test = train_test.drop(['Cust_id','Active_Customer'],axis=1)
X_train = train_test[:train_count]
X_test = train_test[train_count:]
# Logistic Regression - Fine
# clf = linear_model.LogisticRegression(verbose=1,n_jobs=-1)
# clf.fit(X_train,Y_train)
# b = clf.coef_
# b0 = clf.intercept_
# print(clf.score(X_train,Y_train))
# Y_pred = clf.predict(X_test)
# Random Forest Regression - Good one
clf = RandomForestRegressor(n_estimators=600, n_jobs=-1, random_state=2016, verbose=1)
clf.fit(X_train,Y_train)
Y_pred = clf.predict(X_test)
# Random Forest Classifier - less Good one
# clf = RandomForestClassifier(n_estimators=250, max_features=0.1,bootstrap=True,
# n_jobs=-1, random_state=2016, verbose=1, oob_score=False)
# clf.fit(X_train,Y_train)
# Y_pred = clf.predict(X_test)
# clf = neighbors.KNeighborsClassifier(1, 'uniform')
# clf.fit(X_train,Y_train)
# Y_pred = clf.predict(X_test)
logistic = linear_model.LogisticRegression(verbose=1,n_jobs=-1,solver='sag',tol=0.01)
# forest_classify = RandomForestClassifier( max_features=0.1,bootstrap=False,
# n_jobs=-1, random_state=2016, verbose=1, oob_score=False)
# forest_classify = RandomForestRegressor(n_jobs=-1, random_state=2016, verbose=1)
# pca = decomposition.PCA()
stdscalar = preprocessing.MinMaxScaler()
#
# n_components = [100, 150, 200, 250]
# n_estimators = [100, 200]
Cs = np.logspace(0.01,1)
# #tsvd = TruncatedSVD(n_components=200, random_state=2016)
pipe = pipeline.Pipeline([('stdscalar',stdscalar),('logistic', logistic)])
# param_grid = {'rfr__max_features': [1, 0.1, "log2"]}
# model = grid_search.GridSearchCV(pipe, dict(pca__n_components=n_components,
# forest_classify__n_estimators=n_estimators), n_jobs=-1, verbose=1)
model = grid_search.GridSearchCV(pipe, dict(logistic__C=Cs), n_jobs=-1, verbose=1)
model.fit(X_train, Y_train)
print("Best parameters found by grid search:")
print(model.best_params_)
print("Best CV score:")
print(model.best_score_)
#
Y_pred = model.predict(X_test)
print(Y_pred.shape)
# x= []
# for i in Y_pred:
# x.append(round(i))
# print(x)
# print(clf.score(X_train,Y_train))
filename = 'submission_' + date + '.csv'
# pd.DataFrame({"Cust_id":test_id,"Active_Customer":x}).to_csv(filename, index=False)
pd.DataFrame({"Cust_id":test_id,"Active_Customer":Y_pred}).to_csv(filename, index=False)
|
import string
import random
import pyperclip  # needed by Credentials.copy_credential
class User:
"""
Class that generates new instances of users
"""
user_log = [] # Empty user log
def __init__(self, user_name, user_password):
'''
__init__ method that helps us define properties for our objects.
Args:
user_name: New user name.
'''
self.user_name = user_name
self.user_password = user_password
def save_user(self):
'''
save_user method saves user objects into user_log
'''
User.user_log.append(self)
class Credentials:
"""
Class that generates new instances of user Credentials
"""
credentials_log = [] # Empty credentials log
user_credentials_log = [] # Empty credential log
def __init__(self, user_name, account_name, account_password):
'''
__init__ method that helps us define properties for our objects.
Args:
            user_name: name of the user the credentials belong to.
            account_name: name of the account.
            account_password: password for the account.
'''
self.user_name = user_name
self.account_name = account_name
self.account_password = account_password
def save_credentials(self):
'''
save_credentials method saves credentials objects into credentials_log
'''
Credentials.credentials_log.append(self)
def delete_credentials(self):
'''
delete_credentials method deletes a saved credentials from the user_log
'''
Credentials.credentials_log.remove(self)
@classmethod
def display_credentials(cls, user_name):
'''
method that returns list of credentials saved
'''
credentials_log = []
for credential in cls.credentials_log:
if credential.user_name == user_name:
credentials_log.append(credential)
return credentials_log
@classmethod
def find_by_account_name(cls,account_name):
'''
Method that takes in a number and returns a credential that matches that account_name.
Args:
account_name: account_name to search for
Returns :
credential of person that matches the account_name.
'''
for credential in cls.credentials_log:
if credential.account_name == account_name:
return credential
@classmethod
def copy_credential(cls, account_name):
'''
method that copies a credential details
'''
found_credential = Credentials.find_by_account_name(account_name)
return pyperclip.copy(found_credential.account_password)
@classmethod
def credential_exist(cls, account_name):
'''
Class method that checks if a credential exists
'''
for credential in cls.credentials_log :
if credential.account_name == account_name:
return True
return False
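# A short usage sketch (not from the original file); the user, account, and passwords
# below are made up for illustration.
if __name__ == "__main__":
    user = User("moringa", "pass123")
    user.save_user()
    credential = Credentials("moringa", "twitter", "tw-pass")
    credential.save_credentials()
    print([c.account_name for c in Credentials.display_credentials("moringa")])
    print(Credentials.credential_exist("twitter"))  # True
|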
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2011-2014, Nigel Small
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" The ogm module provides Object to Graph Mapping features similar to ORM
facilities available for relational databases. All functionality is available
through the :py:class:`Store` class which is bound to a specific
:py:class:`neo4j.GraphDatabaseService` instance on creation.
Conceptually, a mapped object "owns" a single node within the graph along with
all of that node's outgoing relationships. These features are managed via a
pair of attributes called `__node__` and `__rel__` which store details of the
mapped node and the outgoing relationships respectively. The only specific
requirement for a mapped object is that it has a nullary constructor which can
be used to create new instances.
The `__node__` attribute holds a :py:class:`neo4j.Node` object which is the
node to which this object is mapped. If the attribute does not exist, or is
:py:const:`None`, the object is considered "unsaved".
The `__rel__` attribute holds a dictionary of outgoing relationship details.
Each key corresponds to a relationship type and each value to a list of
2-tuples representing the outgoing relationships of that type. Within each
2-tuple, the first value holds a dictionary of relationship properties (which
may be empty) and the second value holds the endpoint. The endpoint may be
either a :py:class:`neo4j.Node` instance or another mapped object. Any such
objects which are unsaved will be lazily saved as required by creation of the
relationship itself. The following data structure outline shows an example of
a `__rel__` attribute (where `alice` and `bob` represent other mapped objects)::
{
"LIKES": [
({}, alice),
({"since": 1999}, bob)
]
}
To manage relationships, use the :py:func:`Store.relate` and
:py:func:`Store.separate` methods. Neither method makes any calls to the
database and operates only on the local `__rel__` attribute. Changes must be
explicitly saved via one of the available save methods. The
:py:func:`Store.load_related` method loads all objects marked as related by
the `__rel__` attribute.
The code below shows an example of usage::
from py2neo import neo4j, ogm
class Person(object):
def __init__(self, email=None, name=None, age=None):
self.email = email
self.name = name
self.age = age
def __str__(self):
return self.name
graph_db = neo4j.GraphDatabaseService()
store = ogm.Store(graph_db)
alice = Person("alice@example.com", "Alice", 34)
store.save_unique("People", "email", alice.email, alice)
bob = Person("bob@example.org", "Bob", 66)
carol = Person("carol@example.net", "Carol", 42)
store.relate(alice, "LIKES", bob) # these relationships are not saved
store.relate(alice, "LIKES", carol) # until `alice` is saved
store.save(alice)
friends = store.load_related(alice, "LIKES", Person)
print("Alice likes {0}".format(" and ".join(str(f) for f in friends)))
"""
from __future__ import unicode_literals
from . import neo4j
class NotSaved(ValueError):
""" Raised when an object has not been saved but a bound node is required.
"""
pass
class Store(object):
def __init__(self, graph_db):
self.graph_db = graph_db
if self.graph_db.supports_optional_match:
self.__delete_query = ("START a=node({A}) "
"OPTIONAL MATCH a-[r]-b "
"DELETE r, a")
else:
self.__delete_query = ("START a=node({A}) "
"MATCH a-[r?]-b "
"DELETE r, a")
def _assert_saved(self, subj):
try:
node = subj.__node__
if node is None:
raise NotSaved(subj)
except AttributeError:
raise NotSaved(subj)
def _get_node(self, endpoint):
if isinstance(endpoint, neo4j.Node):
return endpoint
if not hasattr(endpoint, "__node__"):
self.save(endpoint)
return endpoint.__node__
def _is_same(self, obj, endpoint):
if isinstance(endpoint, neo4j.Node):
if hasattr(obj, "__node__"):
return endpoint == obj.__node__
else:
return False
else:
return endpoint is obj
def is_saved(self, subj):
""" Return :py:const:`True` if the object `subj` has been saved to
the database, :py:const:`False` otherwise.
:param subj: the object to test
"""
return hasattr(subj, "__node__") and subj.__node__ is not None
def relate(self, subj, rel_type, obj, properties=None):
""" Define a relationship between `subj` and `obj` of type `rel_type`.
This is a local operation only: nothing is saved to the database until
a save method is called. Relationship properties may optionally be
specified.
:param subj: the object bound to the start of the relationship
:param rel_type: the relationship type
:param obj: the object bound to the end of the relationship
:param properties: properties attached to the relationship (optional)
"""
if not hasattr(subj, "__rel__"):
subj.__rel__ = {}
if rel_type not in subj.__rel__:
subj.__rel__[rel_type] = []
subj.__rel__[rel_type].append((properties or {}, obj))
def separate(self, subj, rel_type, obj=None):
""" Remove any relationship definitions which match the criteria
specified. This is a local operation only: nothing is saved to the
database until a save method is called. If no object is specified, all
relationships of type `rel_type` are removed.
:param subj: the object bound to the start of the relationship
:param rel_type: the relationship type
:param obj: the object bound to the end of the relationship (optional)
"""
if not hasattr(subj, "__rel__"):
return
if rel_type not in subj.__rel__:
return
if obj is None:
del subj.__rel__[rel_type]
else:
subj.__rel__[rel_type] = [
(props, endpoint)
for props, endpoint in subj.__rel__[rel_type]
if not self._is_same(obj, endpoint)
]
def load_related(self, subj, rel_type, cls):
""" Load all nodes related to `subj` by a relationship of type
`rel_type` into objects of type `cls`.
:param subj: the object bound to the start of the relationship
:param rel_type: the relationship type
:param cls: the class to load all related objects into
:return: list of `cls` instances
"""
if not hasattr(subj, "__rel__"):
return []
if rel_type not in subj.__rel__:
return []
return [
self.load(cls, self._get_node(endpoint))
for rel_props, endpoint in subj.__rel__[rel_type]
]
def load(self, cls, node):
""" Load and return an object of type `cls` from database node `node`.
:param cls: the class of the object to be returned
:param node: the node from which to load object data
:return: a `cls` instance
"""
subj = cls()
setattr(subj, "__node__", node)
self.reload(subj)
return subj
def load_indexed(self, index_name, key, value, cls):
""" Load zero or more indexed nodes from the database into a list of
objects.
:param index_name: the node index name
:param key: the index key
:param value: the index value
:param cls: the class of the object to be returned
:return: a list of `cls` instances
"""
index = self.graph_db.get_index(neo4j.Node, index_name)
nodes = index.get(key, value)
return [self.load(cls, node) for node in nodes]
def load_unique(self, index_name, key, value, cls):
""" Load a uniquely indexed node from the database into an object.
:param index_name: the node index name
:param key: the index key
:param value: the index value
:param cls: the class of the object to be returned
        :return: an instance of `cls` containing the loaded data
"""
index = self.graph_db.get_index(neo4j.Node, index_name)
nodes = index.get(key, value)
if not nodes:
return None
        if len(nodes) > 1:
            raise LookupError("Multiple nodes match the given criteria; "
                              "consider using `load_indexed` instead.")
return self.load(cls, nodes[0])
def reload(self, subj):
""" Reload properties and relationships from a database node into
`subj`.
:param subj: the object to reload
:raise NotSaved: if `subj` is not linked to a database node
"""
self._assert_saved(subj)
# naively copy properties from node to object
properties = subj.__node__.get_properties()
for key in subj.__dict__:
if not key.startswith("_") and key not in properties:
setattr(subj, key, None)
for key, value in properties.items():
if not key.startswith("_"):
setattr(subj, key, value)
subj.__rel__ = {}
for rel in subj.__node__.match():
if rel.type not in subj.__rel__:
subj.__rel__[rel.type] = []
subj.__rel__[rel.type].append((rel.get_properties(), rel.end_node))
def save(self, subj, node=None):
""" Save an object to a database node.
:param subj: the object to save
:param node: the database node to save to (if omitted, will re-save to
same node as previous save)
"""
if node is not None:
subj.__node__ = node
# naively copy properties from object to node
props = {}
for key, value in subj.__dict__.items():
if not key.startswith("_"):
props[key] = value
if hasattr(subj, "__node__"):
subj.__node__.set_properties(props)
query = neo4j.CypherQuery(self.graph_db, "START a=node({A}) "
"MATCH (a)-[r]->(b) "
"DELETE r")
query.execute(A=subj.__node__._id)
else:
subj.__node__, = self.graph_db.create(props)
# write rels
if hasattr(subj, "__rel__"):
batch = neo4j.WriteBatch(self.graph_db)
for rel_type, rels in subj.__rel__.items():
for rel_props, endpoint in rels:
end_node = self._get_node(endpoint)
if not neo4j.familiar(end_node, self.graph_db):
raise ValueError(end_node)
batch.create((subj.__node__, rel_type, end_node, rel_props))
batch.run()
return subj
def save_indexed(self, index_name, key, value, *subj):
""" Save one or more objects to the database, indexed under the
supplied criteria.
:param index_name: the node index name
:param key: the index key
:param value: the index value
:param subj: one or more objects to save
"""
index = self.graph_db.get_or_create_index(neo4j.Node, index_name)
for subj in subj:
            index.add(key, value, self._get_node(self.save(subj)))
def save_unique(self, index_name, key, value, subj):
""" Save an object to the database, uniquely indexed under the
supplied criteria.
:param index_name: the node index name
:param key: the index key
:param value: the index value
:param subj: the object to save
"""
index = self.graph_db.get_or_create_index(neo4j.Node, index_name)
node = index.get_or_create(key, value, {})
self.save(subj, node)
def delete(self, subj):
""" Delete a saved object node from the database as well as all
incoming and outgoing relationships.
:param subj: the object to delete from the database
:raise NotSaved: if `subj` is not linked to a database node
"""
self._assert_saved(subj)
node = subj.__node__
del subj.__node__
neo4j.CypherQuery(self.graph_db,
self.__delete_query).execute(A=node._id)
|
from django.urls import path, include
from .views import UserViewSet, customUserRegister, custom_user_login
from django.conf import settings
from django.conf.urls.static import static
from rest_framework import routers
router = routers.DefaultRouter()
router.register('users', UserViewSet)
urlpatterns = [
path('', include(router.urls)),
    path('users/register', view=customUserRegister, name='user_register'),
    path('users/login', view=custom_user_login, name='custom_user_login'),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) |
import heapq
h = []
heapq.heappush(h, (10.1, 0))
heapq.heappush(h, (1.1, 1))
print(h)
assert h == [(1.1, 1), (10.1, 0)]
|
# Copyright 2018 AimBrain Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
import argparse
import base64
import numpy as np
import csv
import sys
import h5py
import pandas as pd
import zarr
from tqdm import tqdm
csv.field_size_limit(sys.maxsize)
def features_to_zarr(phase):
FIELDNAMES = ['image_id', 'image_w', 'image_h',
'num_boxes', 'boxes', 'features']
if phase == 'trainval':
infiles = [
'raw/trainval_36/trainval_resnet101_faster_rcnn_genome_36.tsv',
]
elif phase == 'test':
infiles = [
'raw/test2015_36/test2015_resnet101_faster_rcnn_genome_36.tsv',
]
else:
raise SystemExit('Unrecognised phase')
# Read the tsv and append to files
boxes = zarr.open_group(phase + '_boxes.zarr', mode='w')
features = zarr.open_group(phase + '.zarr', mode='w')
image_size = {}
for infile in infiles:
with open(infile, "r") as tsv_in_file:
reader = csv.DictReader(
tsv_in_file, delimiter='\t', fieldnames=FIELDNAMES)
print('Converting ' + infile + ' to zarr...')
for item in tqdm(reader):
item['image_id'] = str(item['image_id'])
item['image_h'] = int(item['image_h'])
item['image_w'] = int(item['image_w'])
item['num_boxes'] = int(item['num_boxes'])
for field in ['boxes', 'features']:
encoded_str = base64.decodestring(
item[field].encode('utf-8'))
item[field] = np.frombuffer(encoded_str,
dtype=np.float32).reshape((item['num_boxes'], -1))
# append to zarr files
boxes.create_dataset(item['image_id'], data=item['boxes'])
features.create_dataset(item['image_id'], data=item['features'])
# image_size dict
image_size[item['image_id']] = {
'image_h':item['image_h'],
'image_w':item['image_w'],
}
# convert dict to pandas dataframe
# create image sizes csv
print('Writing image sizes csv...')
df = pd.DataFrame.from_dict(image_size)
df = df.transpose()
d = df.to_dict()
dw = d['image_w']
dh = d['image_h']
d = [dw, dh]
dwh = {}
for k in dw.keys():
dwh[k] = np.array([d0[k] for d0 in d])
image_sizes = pd.DataFrame(dwh)
image_sizes.to_csv(phase + '_image_size.csv')
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Preprocessing for VQA v2 image data')
parser.add_argument('--data', nargs='+', help='trainval, and/or test, list of data phases to be processed', required=True)
args, unparsed = parser.parse_known_args()
if len(unparsed) != 0:
raise SystemExit('Unknown argument: {}'.format(unparsed))
phase_list = args.data
for phase in phase_list:
# First download and extract
if not os.path.exists(phase + '.zarr'):
print('Converting features tsv to zarr file...')
features_to_zarr(phase)
print('Done')
|
Code:
print(input().replace('1', 'one')) |
lr = 0.0006
optimizer = dict(
name='torch_optimizer',
torch_optim_class='Adam',
lr=lr,
parameters=[
dict(
params='conv_offset',
lr=lr * 0.1 # 1/10 lr for DCNv2 offsets
),
dict(
params='__others__'
)
]
)
|
# Copyright (c) 2020-2022 Adam Karpierz
# Licensed under the zlib/libpng License
# https://opensource.org/licenses/Zlib
from .__about__ import * ; del __about__ # noqa
from ._about import * ; del _about # noqa
|
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/swaps.ipynb (unless otherwise specified).
__all__ = ['get_swap_df', 'lookup_token_name']
# Cell
import pandas as pd
import numpy as np
from .knowledge_graph import Query
from .contracts import whatis
def get_swap_df(skip=None, limit=None):
q = Query()
q.add("MATCH (b:Block)-[]->(:Transaction)-[]->()-[calls:CALLS]->(amm:Contract) ")
q.add("WHERE amm.address='0x98ace08d2b759a265ae326f010496bcd63c15afc'")
q.add("RETURN b.signed_at as signed_at,")
q.add("""
calls._toAmount as to_amount,
calls._fromAmount as from_amount,
calls._toToken as to_token,
calls._fromToken as from_token,
calls._smartToken as smart_token,
calls._trader as trader
""")
if skip:
q.add("SKIP {}".format(skip))
if limit:
q.add("LIMIT {}".format(limit))
swaps = q.data()
df = pd.DataFrame(swaps)
df['signed_at'] = pd.to_datetime(df['signed_at'])
df['to_token'] = df.apply(lambda row: lookup_token_name(row, 'to_token'), axis='columns')
df['from_token'] = df.apply(lambda row: lookup_token_name(row, 'from_token'), axis='columns')
df['trader'] = df.apply(lambda row: lookup_token_name(row, 'trader'), axis='columns')
df['to_amount'] = df.to_amount.astype(np.double)
df['from_amount'] = df.from_amount.astype(np.double)
return df
def lookup_token_name(row, col_name):
address = row[col_name]
matching_tokens = whatis(address)
if matching_tokens:
return matching_tokens[0].name
else:
return address |
from dhis.config import Config
from dhis.server import Server
from dhis.types import *
api=Server()
#api=Server(Config(config_file))
#api=Server(Config(config_url))
#api=Server(Config({..}))
api.get("dataElements")
api.get("dataElements",return_type="request")
api.get("dataElements",return_type="json")
api.get("dataElements",return_type="collection")
api.get("dataElements",fields="id,name",return_type="collection")
api.get("dataElements",filter=["type:eq:int"],return_type="collection")
api.get("dataElements",fields="id,name",filter=["type:eq:int"],return_type="collection")
## Proposed API declaration
## api.define("dataElements",return_type="collection",params=['fields','filter'],types={'fields': fieldlist,'filter': filter_string})
|
def download():
print("Download specific file.")
def etl():
print("Run full ETL pipeline on file.")
|
from numpy import *
from numpy.random import *
from LabFuncs import *
from Params import *
from HaloFuncs import *
from WIMPFuncs import *
import pandas
# Halo params
HaloModel = SHMpp
v0 = HaloModel.RotationSpeed
v_esc = HaloModel.EscapeSpeed
beta = HaloModel.SausageBeta
sig_beta = HaloModel.SausageDispersionTensor
sig_iso = array([1.0,1.0,1.0])*v0/sqrt(2.0)
# Load shards
df = pandas.read_csv('../data/FitShards_red.csv')
names = df.group_id
nshards = size(names)
velocities = zeros(shape=(nshards,3))
dispersions = zeros(shape=(nshards,3))
velocities[0:(nshards),0] = df.vx # stream velocities
velocities[0:(nshards),1] = df.vy
velocities[0:(nshards),2] = df.vz
dispersions[0:(nshards),0] = df.sigx # dispersion tensors
dispersions[0:(nshards),1] = df.sigy
dispersions[0:(nshards),2] = df.sigz
pops = df.population
Psun = df.Psun
weights = ShardsWeights(names,pops,Psun)
iS1 = 0
iS2 = arange(1,3)
iRet = arange(3,10)
iPro = arange(10,25)
iLowE = arange(25,59)
# v_mins
n = 1000
v_min = linspace(0.01,750.0,n)
# Times
ndays = 100
days = linspace(0.0,365.0-365.0/ndays,ndays)
# Calculate everything
gmin_Iso = zeros(shape=(ndays,n))
gmin_Iso_gf = zeros(shape=(ndays,n))
gmin_Saus = zeros(shape=(ndays,n))
gmin_Saus_gf = zeros(shape=(ndays,n))
gmin_S1 = zeros(shape=(ndays,n))
gmin_S1_gf = zeros(shape=(ndays,n))
gmin_S2 = zeros(shape=(ndays,n))
gmin_S2_gf = zeros(shape=(ndays,n))
gmin_Ret = zeros(shape=(ndays,n))
gmin_Ret_gf = zeros(shape=(ndays,n))
gmin_Pro = zeros(shape=(ndays,n))
gmin_Pro_gf = zeros(shape=(ndays,n))
gmin_LowE = zeros(shape=(ndays,n))
gmin_LowE_gf = zeros(shape=(ndays,n))
for i in range(0,ndays):
gmin_Iso[i,:] = gvmin_Triaxial(v_min,days[i],sig_iso)
gmin_Iso_gf[i,:] = gvmin_Triaxial(v_min,days[i],sig_iso,GravFocus=True)
gmin_Saus[i,:] = gvmin_Triaxial(v_min,days[i],sig_beta)
gmin_Saus_gf[i,:] = gvmin_Triaxial(v_min,days[i],sig_beta,GravFocus=True)
gmin_sub = zeros(shape=(nshards,n))
gmin_sub_gf = zeros(shape=(nshards,n))
for isub in range(0,nshards):
v_s = velocities[isub,:]
sig_s = dispersions[isub,:]
gmin_sub[isub,:] = weights[isub]*gvmin_Triaxial(v_min,days[i],sig_s,v_shift=v_s)
gmin_sub_gf[isub,:] = weights[isub]*gvmin_Triaxial(v_min,days[i],sig_s,v_shift=v_s,GravFocus=True)
gmin_S1[i,:] = gmin_sub[iS1,:]
gmin_S1_gf[i,:] = gmin_sub_gf[iS1,:]
gmin_S2[i,:] = sum(gmin_sub[iS2,:],0)
gmin_S2_gf[i,:] = sum(gmin_sub_gf[iS2,:],0)
gmin_Ret[i,:] = sum(gmin_sub[iRet,:],0)
gmin_Ret_gf[i,:] = sum(gmin_sub_gf[iRet,:],0)
gmin_Pro[i,:] = sum(gmin_sub[iPro,:],0)
gmin_Pro_gf[i,:] = sum(gmin_sub_gf[iPro,:],0)
gmin_LowE[i,:] = sum(gmin_sub[iLowE,:],0)
gmin_LowE_gf[i,:] = sum(gmin_sub_gf[iLowE,:],0)
print('day = ',i+1,'of',ndays,sum(gmin_S1[i,:]),sum(gmin_S1_gf[i,:]))
savetxt('../data/gvmin/gvmin_Halo.txt',vstack((v_min,gmin_Iso)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Halo_GF.txt',vstack((v_min,gmin_Iso_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Saus.txt',vstack((v_min,gmin_Saus)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Saus_GF.txt',vstack((v_min,gmin_Saus_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_S1.txt',vstack((v_min,gmin_S1)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_S1_GF.txt',vstack((v_min,gmin_S1_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_S2.txt',vstack((v_min,gmin_S2)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_S2_GF.txt',vstack((v_min,gmin_S2_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Ret.txt',vstack((v_min,gmin_Ret)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Ret_GF.txt',vstack((v_min,gmin_Ret_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Pro.txt',vstack((v_min,gmin_Pro)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_Pro_GF.txt',vstack((v_min,gmin_Pro_gf)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_LowE.txt',vstack((v_min,gmin_LowE)),delimiter='\t',fmt="%1.12f")
savetxt('../data/gvmin/gvmin_LowE_GF.txt',vstack((v_min,gmin_LowE_gf)),delimiter='\t',fmt="%1.12f")
|
import drned
import pytest
import common.test_here as common
import re
config = {}
def simple_load(name):
def f(p):
p.device.rload("simple_tests/%s" % name)
p.device.dry_run(fname="drned-work/drned-load.txt")
p.device.commit()
return True
return f
config["test-sequence-1"] = [
simple_load("sample-day0.txt"),
simple_load("sample-day1.txt"),
# Here we can do validations of dry-run after day1 is applied
lambda p: True
]
# Example of usage of config-chunks with validity checks of dry-run output
config["test-sequence-2"] = [
"""
some config here
some config here
some config here
some config here
""",
"""
some more config here
some more config here
some more config here
some more config here
""",
lambda p: (p.drned_load.count("this line must occur 3 times") == 3),
lambda p: (not ("this line must not occur" in p.drned_load)),
lambda p: (p.drned_load.index("this line must occur before") <
p.drned_load.index("this line must occur after")),
]
@pytest.mark.parametrize("name", config)
def test_here_ok(device, name):
common.test_here_single(device, config, name, dry_run=True)
|
import argparse
import csv
import os
from lib.tournament import Tournament, MatchResult, PlayerMatchResult, print_pairings, write_scorecard_csv
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Record results')
parser.add_argument('file', type=str, help='path to the tournament file')
parser.add_argument('results', type=str, help='path to the results file')
args = parser.parse_args()
results = []
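    # Each row of the results CSV describes one match; an illustrative format,
    # inferred from the parsing below (a drop entry must follow that player's score):
    #   Alice:2,Bob:1,1        -> Alice 2 wins, Bob 1 win, 1 drawn game
    #   Carol:2,Dave:0,-Dave   -> Carol beat Dave 2-0 and Dave dropped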
with open(args.results) as results_csv:
reader = csv.reader(results_csv)
for row in reader:
player_match_results = {} # map from player name to PlayerMatchResult
draws = 0
for string in row:
if ':' in string:
# This is a player score
player_name, wins = string.split(':')
wins = int(wins)
player_match_results[player_name] = PlayerMatchResult(player_name, wins)
elif '-' in string:
# This is a drop
player_name = string[1:]
player_match_results[player_name].drop = True
else:
# This is a number of draws.
draws = int(string)
results.append(MatchResult(list(player_match_results.values()), draws=draws))
tournament = Tournament.load(args.file)
tournament.record_results(results)
tournament.save()
rankings = tournament.rankings()
with open(os.path.join(tournament.dir, 'rankings.csv'), 'w') as f:
writer = csv.writer(f)
writer.writerow(['rank', 'name', 'match points', 'opp match win %', 'game win %', 'opp game win %'])
for i, player in enumerate(rankings):
writer.writerow([
str(i+1),
player.name,
str(player.match_points),
'%.3f' % player.opp_match_win_percent,
'%.3f' % player.game_win_percent,
'%.3f' % player.opp_game_win_percent])
pairings = tournament.new_pairings()
print_pairings(pairings)
write_scorecard_csv(tournament.dir, pairings)
|
# Django
from django.db import models
# Models
from ...utils import PRMModel
# Fields
from django_extensions.db.fields import RandomCharField
class Event(PRMModel):
"""
Represents an event from a calendar, an event is an upcoming situation
of the user, aside from the expected information data, you can add
which of the contacts you'll be doing it
"""
owner = models.ForeignKey('users.User', on_delete=models.CASCADE)
title = models.CharField(max_length=200)
code = RandomCharField(length=8, blank=False, null=False, unique=True)
description = models.CharField(max_length=2000, blank=True)
location = models.CharField(max_length=300)
date = models.DateField()
start_time = models.TimeField()
end_time = models.TimeField()
contacts = models.ManyToManyField('relations.Contact')
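
# A minimal usage sketch (assumes existing User and Contact instances named
# `user` and `friend`; `code` is generated automatically by RandomCharField):
#
#   event = Event.objects.create(
#       owner=user, title='Coffee', location='Downtown',
#       date='2020-01-15', start_time='10:00', end_time='11:00')
#   event.contacts.add(friend)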
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Prefix related APIs.
"""
import logging
from ryu.lib.packet.bgp import EvpnEsi
from ryu.lib.packet.bgp import EvpnNLRI
from ryu.lib.packet.bgp import EvpnEthernetAutoDiscoveryNLRI
from ryu.lib.packet.bgp import EvpnMacIPAdvertisementNLRI
from ryu.lib.packet.bgp import EvpnInclusiveMulticastEthernetTagNLRI
from ryu.lib.packet.bgp import EvpnEthernetSegmentNLRI
from ryu.lib.packet.bgp import EvpnIpPrefixNLRI
from ryu.lib.packet.bgp import BGPPathAttributePmsiTunnel
from ryu.lib.packet.bgp import FlowSpecIPv4NLRI
from ryu.lib.packet.bgp import FlowSpecIPv6NLRI
from ryu.lib.packet.bgp import FlowSpecVPNv4NLRI
from ryu.lib.packet.bgp import FlowSpecVPNv6NLRI
from ryu.lib.packet.bgp import FlowSpecL2VPNNLRI
from ryu.lib.packet.bgp import BGPFlowSpecTrafficRateCommunity
from ryu.lib.packet.bgp import BGPFlowSpecTrafficActionCommunity
from ryu.lib.packet.bgp import BGPFlowSpecRedirectCommunity
from ryu.lib.packet.bgp import BGPFlowSpecTrafficMarkingCommunity
from ryu.lib.packet.bgp import BGPFlowSpecVlanActionCommunity
from ryu.lib.packet.bgp import BGPFlowSpecTPIDActionCommunity
from ryu.services.protocols.bgp.api.base import EVPN_ROUTE_TYPE
from ryu.services.protocols.bgp.api.base import EVPN_ESI
from ryu.services.protocols.bgp.api.base import EVPN_ETHERNET_TAG_ID
from ryu.services.protocols.bgp.api.base import REDUNDANCY_MODE
from ryu.services.protocols.bgp.api.base import MAC_ADDR
from ryu.services.protocols.bgp.api.base import IP_ADDR
from ryu.services.protocols.bgp.api.base import IP_PREFIX
from ryu.services.protocols.bgp.api.base import GW_IP_ADDR
from ryu.services.protocols.bgp.api.base import MPLS_LABELS
from ryu.services.protocols.bgp.api.base import NEXT_HOP
from ryu.services.protocols.bgp.api.base import PREFIX
from ryu.services.protocols.bgp.api.base import RegisterWithArgChecks
from ryu.services.protocols.bgp.api.base import ROUTE_DISTINGUISHER
from ryu.services.protocols.bgp.api.base import VPN_LABEL
from ryu.services.protocols.bgp.api.base import EVPN_VNI
from ryu.services.protocols.bgp.api.base import TUNNEL_TYPE
from ryu.services.protocols.bgp.api.base import PMSI_TUNNEL_TYPE
from ryu.services.protocols.bgp.api.base import FLOWSPEC_FAMILY
from ryu.services.protocols.bgp.api.base import FLOWSPEC_RULES
from ryu.services.protocols.bgp.api.base import FLOWSPEC_ACTIONS
from ryu.services.protocols.bgp.base import add_bgp_error_metadata
from ryu.services.protocols.bgp.base import PREFIX_ERROR_CODE
from ryu.services.protocols.bgp.base import validate
from ryu.services.protocols.bgp.core import BgpCoreError
from ryu.services.protocols.bgp.core_manager import CORE_MANAGER
from ryu.services.protocols.bgp.rtconf.base import ConfigValueError
from ryu.services.protocols.bgp.rtconf.base import RuntimeConfigError
from ryu.services.protocols.bgp.rtconf.vrfs import VRF_RF
from ryu.services.protocols.bgp.rtconf.vrfs import VRF_RF_IPV4
from ryu.services.protocols.bgp.rtconf.vrfs import VRF_RF_L2_EVPN
from ryu.services.protocols.bgp.utils import validation
LOG = logging.getLogger('bgpspeaker.api.prefix')
# Maximum value of the Ethernet Tag ID
EVPN_MAX_ET = EvpnNLRI.MAX_ET
# ESI Types
ESI_TYPE_ARBITRARY = EvpnEsi.ARBITRARY
ESI_TYPE_LACP = EvpnEsi.LACP
ESI_TYPE_L2_BRIDGE = EvpnEsi.L2_BRIDGE
ESI_TYPE_MAC_BASED = EvpnEsi.MAC_BASED
ESI_TYPE_ROUTER_ID = EvpnEsi.ROUTER_ID
ESI_TYPE_AS_BASED = EvpnEsi.AS_BASED
SUPPORTED_ESI_TYPES = [
ESI_TYPE_ARBITRARY,
ESI_TYPE_LACP,
ESI_TYPE_L2_BRIDGE,
ESI_TYPE_MAC_BASED,
ESI_TYPE_ROUTER_ID,
ESI_TYPE_AS_BASED,
]
# Constants used in API calls for EVPN
EVPN_ETH_AUTO_DISCOVERY = EvpnEthernetAutoDiscoveryNLRI.ROUTE_TYPE_NAME
EVPN_MAC_IP_ADV_ROUTE = EvpnMacIPAdvertisementNLRI.ROUTE_TYPE_NAME
EVPN_MULTICAST_ETAG_ROUTE = (
EvpnInclusiveMulticastEthernetTagNLRI.ROUTE_TYPE_NAME)
EVPN_ETH_SEGMENT = EvpnEthernetSegmentNLRI.ROUTE_TYPE_NAME
EVPN_IP_PREFIX_ROUTE = EvpnIpPrefixNLRI.ROUTE_TYPE_NAME
SUPPORTED_EVPN_ROUTE_TYPES = [
EVPN_ETH_AUTO_DISCOVERY,
EVPN_MAC_IP_ADV_ROUTE,
EVPN_MULTICAST_ETAG_ROUTE,
EVPN_ETH_SEGMENT,
EVPN_IP_PREFIX_ROUTE,
]
# Constants used in API calls for Flow Specification
FLOWSPEC_FAMILY_IPV4 = FlowSpecIPv4NLRI.FLOWSPEC_FAMILY
FLOWSPEC_FAMILY_IPV6 = FlowSpecIPv6NLRI.FLOWSPEC_FAMILY
FLOWSPEC_FAMILY_VPNV4 = FlowSpecVPNv4NLRI.FLOWSPEC_FAMILY
FLOWSPEC_FAMILY_VPNV6 = FlowSpecVPNv6NLRI.FLOWSPEC_FAMILY
FLOWSPEC_FAMILY_L2VPN = FlowSpecL2VPNNLRI.FLOWSPEC_FAMILY
SUPPORTED_FLOWSPEC_FAMILIES = (
FLOWSPEC_FAMILY_IPV4,
FLOWSPEC_FAMILY_IPV6,
FLOWSPEC_FAMILY_VPNV4,
FLOWSPEC_FAMILY_VPNV6,
FLOWSPEC_FAMILY_L2VPN,
)
# Constants for the Traffic Filtering Actions of Flow Specification.
FLOWSPEC_ACTION_TRAFFIC_RATE = BGPFlowSpecTrafficRateCommunity.ACTION_NAME
FLOWSPEC_ACTION_TRAFFIC_ACTION = BGPFlowSpecTrafficActionCommunity.ACTION_NAME
FLOWSPEC_ACTION_REDIRECT = BGPFlowSpecRedirectCommunity.ACTION_NAME
FLOWSPEC_ACTION_TRAFFIC_MARKING = BGPFlowSpecTrafficMarkingCommunity.ACTION_NAME
FLOWSPEC_ACTION_VLAN = BGPFlowSpecVlanActionCommunity.ACTION_NAME
FLOWSPEC_ACTION_TPID = BGPFlowSpecTPIDActionCommunity.ACTION_NAME
SUPPORTTED_FLOWSPEC_ACTIONS = (
FLOWSPEC_ACTION_TRAFFIC_RATE,
FLOWSPEC_ACTION_TRAFFIC_ACTION,
FLOWSPEC_ACTION_REDIRECT,
FLOWSPEC_ACTION_TRAFFIC_MARKING,
FLOWSPEC_ACTION_VLAN,
FLOWSPEC_ACTION_TPID,
)
# Constants for ESI Label extended community
REDUNDANCY_MODE_ALL_ACTIVE = 'all_active'
REDUNDANCY_MODE_SINGLE_ACTIVE = 'single_active'
SUPPORTED_REDUNDANCY_MODES = [
REDUNDANCY_MODE_ALL_ACTIVE,
REDUNDANCY_MODE_SINGLE_ACTIVE,
]
# Constants for BGP Tunnel Encapsulation Attribute
TUNNEL_TYPE_VXLAN = 'vxlan'
TUNNEL_TYPE_NVGRE = 'nvgre'
TUNNEL_TYPE_MPLS = 'mpls'
TUNNEL_TYPE_MPLS_IN_GRE = 'mpls_in_gre'
TUNNEL_TYPE_VXLAN_GRE = 'vxlan_gre'
SUPPORTED_TUNNEL_TYPES = [
TUNNEL_TYPE_VXLAN,
TUNNEL_TYPE_NVGRE,
TUNNEL_TYPE_MPLS,
TUNNEL_TYPE_MPLS_IN_GRE,
TUNNEL_TYPE_VXLAN_GRE,
]
# Constants for PMSI Tunnel Attribute
PMSI_TYPE_NO_TUNNEL_INFO = (
BGPPathAttributePmsiTunnel.TYPE_NO_TUNNEL_INFORMATION_PRESENT
)
PMSI_TYPE_INGRESS_REP = (
BGPPathAttributePmsiTunnel.TYPE_INGRESS_REPLICATION
)
SUPPORTED_PMSI_TUNNEL_TYPES = [
PMSI_TYPE_NO_TUNNEL_INFO,
PMSI_TYPE_INGRESS_REP,
]
@add_bgp_error_metadata(code=PREFIX_ERROR_CODE,
sub_code=1,
def_desc='Unknown error related to operation on '
'prefixes')
class PrefixError(RuntimeConfigError):
pass
@validate(name=PREFIX)
def is_valid_prefix(prefix):
if not (validation.is_valid_ipv4_prefix(prefix)
or validation.is_valid_ipv6_prefix(prefix)):
raise ConfigValueError(conf_name=PREFIX,
conf_value=prefix)
@validate(name=NEXT_HOP)
def is_valid_next_hop(next_hop):
if not (validation.is_valid_ipv4(next_hop)
or validation.is_valid_ipv6(next_hop)):
raise ConfigValueError(conf_name=NEXT_HOP,
conf_value=next_hop)
@validate(name=EVPN_ROUTE_TYPE)
def is_valid_evpn_route_type(route_type):
if route_type not in SUPPORTED_EVPN_ROUTE_TYPES:
raise ConfigValueError(conf_name=EVPN_ROUTE_TYPE,
conf_value=route_type)
@validate(name=EVPN_ESI)
def is_valid_esi(esi):
if not validation.is_valid_esi(esi):
raise ConfigValueError(conf_name=EVPN_ESI,
conf_value=esi)
@validate(name=EVPN_ETHERNET_TAG_ID)
def is_valid_ethernet_tag_id(ethernet_tag_id):
if not validation.is_valid_ethernet_tag_id(ethernet_tag_id):
raise ConfigValueError(conf_name=EVPN_ETHERNET_TAG_ID,
conf_value=ethernet_tag_id)
@validate(name=REDUNDANCY_MODE)
def is_valid_redundancy_mode(redundancy_mode):
if redundancy_mode not in SUPPORTED_REDUNDANCY_MODES:
raise ConfigValueError(conf_name=REDUNDANCY_MODE,
conf_value=redundancy_mode)
@validate(name=MAC_ADDR)
def is_valid_mac_addr(addr):
if not validation.is_valid_mac(addr):
raise ConfigValueError(conf_name=MAC_ADDR,
conf_value=addr)
@validate(name=IP_ADDR)
def is_valid_ip_addr(addr):
# Note: Allows empty IP Address (means length=0).
# e.g.) L2VPN MAC advertisement of Cisco NX-OS
if not (addr is None
or validation.is_valid_ipv4(addr)
or validation.is_valid_ipv6(addr)):
raise ConfigValueError(conf_name=IP_ADDR,
conf_value=addr)
@validate(name=IP_PREFIX)
def is_valid_ip_prefix(prefix):
if not (validation.is_valid_ipv4_prefix(prefix)
or validation.is_valid_ipv6_prefix(prefix)):
raise ConfigValueError(conf_name=IP_PREFIX,
conf_value=prefix)
@validate(name=GW_IP_ADDR)
def is_valid_gw_ip_addr(addr):
if not (validation.is_valid_ipv4(addr)
or validation.is_valid_ipv6(addr)):
raise ConfigValueError(conf_name=GW_IP_ADDR,
conf_value=addr)
@validate(name=MPLS_LABELS)
def is_valid_mpls_labels(labels):
if not validation.is_valid_mpls_labels(labels):
raise ConfigValueError(conf_name=MPLS_LABELS,
conf_value=labels)
@validate(name=EVPN_VNI)
def is_valid_vni(vni):
if not validation.is_valid_vni(vni):
raise ConfigValueError(conf_name=EVPN_VNI,
conf_value=vni)
@validate(name=TUNNEL_TYPE)
def is_valid_tunnel_type(tunnel_type):
if tunnel_type not in SUPPORTED_TUNNEL_TYPES:
raise ConfigValueError(conf_name=TUNNEL_TYPE,
conf_value=tunnel_type)
@validate(name=PMSI_TUNNEL_TYPE)
def is_valid_pmsi_tunnel_type(pmsi_tunnel_type):
if pmsi_tunnel_type not in SUPPORTED_PMSI_TUNNEL_TYPES:
raise ConfigValueError(conf_name=PMSI_TUNNEL_TYPE,
conf_value=pmsi_tunnel_type)
@validate(name=FLOWSPEC_FAMILY)
def is_valid_flowspec_family(flowspec_family):
if flowspec_family not in SUPPORTED_FLOWSPEC_FAMILIES:
raise ConfigValueError(conf_name=FLOWSPEC_FAMILY,
conf_value=flowspec_family)
@validate(name=FLOWSPEC_RULES)
def is_valid_flowspec_rules(rules):
if not isinstance(rules, dict):
raise ConfigValueError(conf_name=FLOWSPEC_RULES,
conf_value=rules)
@validate(name=FLOWSPEC_ACTIONS)
def is_valid_flowspec_actions(actions):
for k in actions:
if k not in SUPPORTTED_FLOWSPEC_ACTIONS:
raise ConfigValueError(conf_name=FLOWSPEC_ACTIONS,
conf_value=actions)
@RegisterWithArgChecks(name='prefix.add_local',
req_args=[ROUTE_DISTINGUISHER, PREFIX, NEXT_HOP],
opt_args=[VRF_RF])
def add_local(route_dist, prefix, next_hop, route_family=VRF_RF_IPV4):
"""Adds *prefix* from VRF identified by *route_dist* and sets the source as
network controller.
"""
try:
# Create new path and insert into appropriate VRF table.
tm = CORE_MANAGER.get_core_service().table_manager
label = tm.update_vrf_table(route_dist, prefix, next_hop, route_family)
# Currently we only allocate one label per local_prefix,
# so we share first label from the list.
if label:
label = label[0]
# Send success response with new label.
return [{ROUTE_DISTINGUISHER: route_dist, PREFIX: prefix,
VRF_RF: route_family, VPN_LABEL: label}]
except BgpCoreError as e:
raise PrefixError(desc=e)
@RegisterWithArgChecks(name='prefix.delete_local',
req_args=[ROUTE_DISTINGUISHER, PREFIX],
opt_args=[VRF_RF])
def delete_local(route_dist, prefix, route_family=VRF_RF_IPV4):
"""Deletes/withdraws *prefix* from VRF identified by *route_dist* and
source as network controller.
"""
try:
tm = CORE_MANAGER.get_core_service().table_manager
tm.update_vrf_table(route_dist, prefix,
route_family=route_family, is_withdraw=True)
# Send success response.
return [{ROUTE_DISTINGUISHER: route_dist, PREFIX: prefix,
VRF_RF: route_family}]
except BgpCoreError as e:
raise PrefixError(desc=e)
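# ---------------------------------------------------------------------------
# Illustrative usage (not part of this module): the two handlers above are
# normally reached through ryu's BGPSpeaker front end, which forwards to the
# registered 'prefix.add_local' / 'prefix.delete_local' calls. A minimal
# sketch, assuming a speaker configured elsewhere:
#
#     from ryu.services.protocols.bgp.bgpspeaker import BGPSpeaker
#
#     speaker = BGPSpeaker(as_number=65000, router_id='10.0.0.1')
#     speaker.prefix_add(prefix='10.20.0.0/24', next_hop='10.0.0.2',
#                        route_dist='65000:100')
#     speaker.prefix_del(prefix='10.20.0.0/24', route_dist='65000:100')
# ---------------------------------------------------------------------------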
# =============================================================================
# BGP EVPN Routes related APIs
# =============================================================================
@RegisterWithArgChecks(name='evpn_prefix.add_local',
req_args=[EVPN_ROUTE_TYPE, ROUTE_DISTINGUISHER,
NEXT_HOP],
opt_args=[EVPN_ESI, EVPN_ETHERNET_TAG_ID,
REDUNDANCY_MODE, MAC_ADDR, IP_ADDR, IP_PREFIX,
GW_IP_ADDR, EVPN_VNI, TUNNEL_TYPE,
PMSI_TUNNEL_TYPE])
def add_evpn_local(route_type, route_dist, next_hop, **kwargs):
"""Adds EVPN route from VRF identified by *route_dist*.
"""
if(route_type in [EVPN_ETH_AUTO_DISCOVERY, EVPN_ETH_SEGMENT]
and kwargs['esi'] == 0):
raise ConfigValueError(conf_name=EVPN_ESI,
conf_value=kwargs['esi'])
try:
# Create new path and insert into appropriate VRF table.
tm = CORE_MANAGER.get_core_service().table_manager
label = tm.update_vrf_table(route_dist, next_hop=next_hop,
route_family=VRF_RF_L2_EVPN,
route_type=route_type, **kwargs)
# Currently we only allocate one label per local route,
# so we share first label from the list.
if label:
label = label[0]
# Send success response with new label.
        ret = {EVPN_ROUTE_TYPE: route_type,
               ROUTE_DISTINGUISHER: route_dist,
               VRF_RF: VRF_RF_L2_EVPN,
               VPN_LABEL: label}
        ret.update(kwargs)
        return [ret]
except BgpCoreError as e:
raise PrefixError(desc=e)
@RegisterWithArgChecks(name='evpn_prefix.delete_local',
req_args=[EVPN_ROUTE_TYPE, ROUTE_DISTINGUISHER],
opt_args=[EVPN_ESI, EVPN_ETHERNET_TAG_ID, MAC_ADDR,
IP_ADDR, IP_PREFIX, EVPN_VNI])
def delete_evpn_local(route_type, route_dist, **kwargs):
"""Deletes/withdraws EVPN route from VRF identified by *route_dist*.
"""
try:
tm = CORE_MANAGER.get_core_service().table_manager
tm.update_vrf_table(route_dist,
route_family=VRF_RF_L2_EVPN,
route_type=route_type, is_withdraw=True, **kwargs)
# Send success response.
        ret = {EVPN_ROUTE_TYPE: route_type,
               ROUTE_DISTINGUISHER: route_dist,
               VRF_RF: VRF_RF_L2_EVPN}
        ret.update(kwargs)
        return [ret]
except BgpCoreError as e:
raise PrefixError(desc=e)
# =============================================================================
# BGP Flow Specification Routes related APIs
# =============================================================================
@RegisterWithArgChecks(
name='flowspec.add_local',
req_args=[FLOWSPEC_FAMILY, ROUTE_DISTINGUISHER, FLOWSPEC_RULES],
opt_args=[FLOWSPEC_ACTIONS])
def add_flowspec_local(flowspec_family, route_dist, rules, **kwargs):
"""Adds Flow Specification route from VRF identified by *route_dist*.
"""
try:
# Create new path and insert into appropriate VRF table.
tm = CORE_MANAGER.get_core_service().table_manager
tm.update_flowspec_vrf_table(
flowspec_family=flowspec_family, route_dist=route_dist,
rules=rules, **kwargs)
# Send success response.
        ret = {FLOWSPEC_FAMILY: flowspec_family,
               ROUTE_DISTINGUISHER: route_dist,
               FLOWSPEC_RULES: rules}
        ret.update(kwargs)
        return [ret]
except BgpCoreError as e:
raise PrefixError(desc=e)
@RegisterWithArgChecks(
name='flowspec.del_local',
req_args=[FLOWSPEC_FAMILY, ROUTE_DISTINGUISHER, FLOWSPEC_RULES])
def del_flowspec_local(flowspec_family, route_dist, rules):
"""Deletes/withdraws Flow Specification route from VRF identified
by *route_dist*.
"""
try:
tm = CORE_MANAGER.get_core_service().table_manager
tm.update_flowspec_vrf_table(
flowspec_family=flowspec_family, route_dist=route_dist,
rules=rules, is_withdraw=True)
# Send success response.
return [{FLOWSPEC_FAMILY: flowspec_family,
ROUTE_DISTINGUISHER: route_dist,
FLOWSPEC_RULES: rules}]
except BgpCoreError as e:
raise PrefixError(desc=e)
|
from bolt4ds import flow as d6tflow
import bolt4ds.flow.tasks
import luigi
import pandas as pd
# define 2 tasks that load raw data
class Task1(d6tflow.tasks.TaskPqPandas):
def run(self):
df = pd.DataFrame({'a':range(3)})
self.save(df) # quickly save dataframe
class Task2(Task1):
pass
# define another task that depends on data from task1 and task2
@d6tflow.requires(Task1,Task2)
class Task3(d6tflow.tasks.TaskPqPandas):
multiplier = luigi.IntParameter(default=2)
def run(self):
df1 = self.input()[0].load() # quickly load input data
df2 = self.input()[1].load() # quickly load input data
df = df1.join(df2, lsuffix='1', rsuffix='2')
df['b']=df['a1']*self.multiplier # use task parameter
self.save(df)
# Execute task including all its dependencies
d6tflow.run(Task3())
'''
* 3 ran successfully:
- 1 Task1()
- 1 Task2()
- 1 Task3(multiplier=2)
'''
Task3().outputLoad() # quickly load output data. Task1().outputLoad() also works
'''
a1 a2 b
0 0 0 0
1 1 1 2
2 2 2 4
'''
# Intelligently rerun workflow after changing parameters
d6tflow.preview(Task3(multiplier=3))
'''
└─--[Task3-{'multiplier': '3'} (PENDING)] => this changed and needs to run
|--[Task1-{} (COMPLETE)] => this doesn't change and doesn't need to rerun
└─--[Task2-{} (COMPLETE)] => this doesn't change and doesn't need to rerun
'''
|
from __future__ import absolute_import, division, print_function, unicode_literals
import HTMLParser
import Queue
import json
import urllib
import urllib2
from echomesh.util import Log
LOGGER = Log.logger(__name__)
ROOT = 'http://search.twitter.com/search.json'
PARSER = HTMLParser.HTMLParser()
def json_to_tweet(tweet):
def get(name):
return urllib.unquote(PARSER.unescape(tweet.get(name, '')))
image_url = get('profile_image_url')
try:
short_url = image_url.replace('_normal.', '.')
urllib2.urlopen(short_url)
image_url = short_url
except:
pass
return {'image_url': image_url,
'text': get('text'),
'user': get('from_user'),
'user_name': get('from_user_name'),
}
class Search(object):
def __init__(self, key, callback, preload=1):
self.key = key
self.preload = preload
self.queue = Queue.Queue()
self.callback = callback
self.max_id_str = ''
def refresh(self):
keywords = {'q': self.key}
first_time = not self.max_id_str
if not first_time:
keywords['since_id'] = self.max_id_str
raw = urllib2.urlopen(ROOT, urllib.urlencode(keywords)).read().decode('utf8')
result = json.loads(raw)
self.max_id_str = result['max_id_str']
tweets = result['results']
if first_time:
tweets = tweets[:self.preload]
for tweet in tweets:
self.callback(json_to_tweet(tweet))
|
# -*- coding:utf8 -*-
import time
from lib.unit.Unit import Unit
from lib.unit.Party import Party
from lib.pixel.PixelData import PixelData
from lib.config.SysConfig import SysConfig
from lib.struct.CoordiPoint import CoordiPoint
from lib.control.Control import Control
# Pet
class Pet(Unit):
def __init__(self):
Unit.__init__(self)
def getMaxLife(self):
return PixelData.getPointByIndex(SysConfig.pixelIndex["petHealthMax"]).getInt()
def getCurrentLife(self):
return PixelData.getPointByIndex(SysConfig.pixelIndex["petHealthCurrent"]).getInt()
def getMaxMana(self):
return PixelData.getPointByIndex(SysConfig.pixelIndex["petPowerMax"]).getInt()
def getCurrentMana(self):
return PixelData.getPointByIndex(SysConfig.pixelIndex["petPowerCurrent"]).getInt()
def getStatus(self):
bools = PixelData.get_point_bools(SysConfig.pixelIndex["petStatus"], 23)
        return {
            "exists": bools[0],
            "dead": bools[1],
            "combat": bools[2],
            "visible": bools[3],
            "happy": bools[4],  # hunter pets only: 1 = unhappy, 2 = content, 3 = happy
            "slot4": bools[5],
            "slot5": bools[6],
            "slot6": bools[7],
            "slot7": bools[8],
        }
|
# Copyright 2019 Matthew Bishop
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from flask_login import (
current_user,
login_required)
from flask_restful import (
fields,
marshal,
Resource)
from pony.orm import (
db_session,
desc,
select)
from pony.orm.core import Query
from typing import (
Callable,
List)
from database import (
Entry as EntryModel,
SourceUserData as SourceUserDataModel,
Tag as TagModel,
User as UserModel)
def get_user_data_for_entry(entry: EntryModel) -> SourceUserDataModel:
"""Gets the source user data for the given entry's source and the logged-in user.
:param entry: The entry who's source we're looking up.
:return: The related source user data.
"""
user: UserModel = UserModel[current_user.user_id]
user_data: SourceUserDataModel = SourceUserDataModel.get(source=entry.source, user=user)
return user_data
# entries
source_in_entry_fields: dict = {
'id': fields.Integer(attribute='source.id'),
'link': fields.String(attribute='source.link'),
'label': fields.String,
'tags': fields.List(fields.String(attribute='label'))
}
entry_fields: dict = {
'id': fields.Integer,
'link': fields.String,
'source': fields.Nested(source_in_entry_fields, attribute=get_user_data_for_entry),
'summary': fields.String,
'title': fields.String,
'updated': fields.DateTime
}
class Entries(Resource):
"""REST endpoint for feed entries."""
decorators: List[Callable] = [login_required]
@staticmethod
def get() -> List[OrderedDict]:
"""Returns all feed entries for sources the logged-in user is subscribed to.
:return: The entries, as a list of JSON-serializable dicts.
"""
with db_session:
# get a list of entries from sources of feeds followed by the logged-in user
user: UserModel = UserModel[current_user.user_id]
sources: Query = select(s.source for s in user.sources)
result: Query = select(e for e in EntryModel if e.source in sources).order_by(desc(EntryModel.updated))
entries: List[EntryModel] = list(result)
# marshall them to JSON-serializable dicts
output: List[OrderedDict] = marshal(entries, entry_fields)
return output
# sources
tag_in_source_fields: dict = {
'id': fields.Integer,
'label': fields.String
}
source_fields: dict = {
'feed_uri': fields.String(attribute='source.feed_uri'),
'id': fields.Integer(attribute='source.id'),
'label': fields.String,
'last_check': fields.DateTime(attribute='source.last_check'),
'last_fetch': fields.DateTime(attribute='source.last_fetch'),
'link': fields.String(attribute='source.link'),
# TODO: pick one tag field
'tag_labels': fields.List(fields.String(attribute='label'), attribute='tags'),
# 'tag_objects': fields.List(fields.Nested(tag_in_source_fields), attribute='tags') # TODO: fix, still broken
}
class Sources(Resource):
"""REST endpoint for feed sources."""
decorators = [login_required]
@staticmethod
def get() -> List[OrderedDict]:
"""Returns all sources the logged-in user is subscribed to.
:return: The sources, as a list of JSON-serializable dicts.
"""
with db_session:
# get a list of sources followed by the logged-in user
user: UserModel = UserModel[current_user.user_id]
sources: List[SourceUserDataModel] = list(user.sources)
# marshall them to JSON-serializable dicts
output: List[OrderedDict] = marshal(sources, source_fields)
return output
# tags
tag_fields: dict = {
'id': fields.Integer,
'label': fields.String
}
class Tags(Resource):
"""REST endpoint for feed tags."""
decorators = [login_required]
@staticmethod
def get() -> List[OrderedDict]:
"""Returns all tags defined by the logged-in user.
:return: The tags, as a list of JSON-serializable dicts.
"""
with db_session:
# get tags from the user
user: UserModel = UserModel[current_user.user_id]
tags: List[TagModel] = list(user.tags)
# marshall them to JSON-serializable dicts
output: List[OrderedDict] = marshal(tags, tag_fields)
return output
|
from pinmap import PinMap
pins = PinMap('/proc', 'adc', 6)
def analog_read(channel):
"""Return the integer value of an adc pin.
adc0 and adc1 have 6 bit resolution.
adc2 through adc5 have 12 bit resolution.
"""
with open(pins.get_path(channel), 'r') as f:
return int(f.read(32).split(':')[1].strip())
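
# Example usage (a sketch; assumes a board exposing /proc/adc* entries, e.g. pcDuino):
#   analog_read(0)   # 6-bit value from adc0 (0-63)
#   analog_read(2)   # 12-bit value from adc2 (0-4095)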
|
distancia = float(input('What is the distance of your trip? '))
print('You are about to start a trip of {}Km.'.format(distancia))
preco = distancia * 0.50 if distancia <= 200 else distancia * 0.45
print('The price of your ticket will be R${:.2f}'.format(preco))
|
# (c) Copyright IBM Corp. 2010, 2020. All Rights Reserved.
# -*- coding: utf-8 -*-
# (c) Copyright IBM Corp. 2020. All Rights Reserved.
import sys
from fn_exchange_online.lib.ms_graph_helper import MSGraphHelper
from resilient_lib.components.integration_errors import IntegrationError
if sys.version_info.major == 2:
from mock import patch
else:
from unittest.mock import patch
MOCKED_OPTS = {
"microsoft_graph_token_url": "microsoft_graph_token_url",
"microsoft_graph_url": u'microsoft_graph_url',
"tenant_id": "tenant_id",
"client_id": "client_id",
"client_secret": "client_secret",
"max_messages": "100",
"max_users": "2000"
}
def generate_response(content, status):
class simResponse:
def __init__(self, content, status):
self.status_code = status
self.content = content
def json(self):
return self.content
return simResponse(content, status)
class TestMSGraphHelper(object):
""" Tests for the MSGraphHelper functions"""
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_get_user_profile(self, authenticate_mock, get_mock):
""" Test Get User Profile"""
print("Test Get User Profile\n")
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
content = {"displayName": "Tester"}
# Test
get_mock.return_value = generate_response(content, 200)
response = MS_graph_helper.get_user_profile("[email protected]")
assert response.status_code == 200
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 404)
response = MS_graph_helper.get_user_profile("[email protected]")
assert response.status_code == 404
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 300)
response = MS_graph_helper.get_user_profile("[email protected]")
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.delete')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_delete_message(self, authenticate_mock, delete_mock):
""" Test """
print("Test Delete Message\n")
content = {"displayName": "Tester"}
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
delete_mock.return_value = generate_response(content, 204)
response = MS_graph_helper.delete_message("[email protected]", None, "AAAA")
assert response.status_code == 204
delete_mock.return_value = generate_response(content, 300)
response = MS_graph_helper.delete_message("[email protected]", None, "AAAA")
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_get_message(self, authenticate_mock, get_mock):
""" Test Get Message"""
print("Test Get Message\n")
content = {"displayName": "Tester"}
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
get_mock.return_value = generate_response(content, 200)
response = MS_graph_helper.get_message("[email protected]", "AAA")
assert response.status_code == 200
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 404)
response = MS_graph_helper.get_message("[email protected]", "AAA")
assert response.status_code == 404
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 300)
response = MS_graph_helper.get_message("[email protected]", "AAA")
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_get_message_mime(self, authenticate_mock, get_mock):
""" Test Get Message Mime"""
print("Test Get Message Mime\n")
content = {"displayName": "Tester"}
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
get_mock.return_value = generate_response(content, 200)
response = MS_graph_helper.get_message_mime("[email protected]", "AAA")
assert response.status_code == 200
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 404)
response = MS_graph_helper.get_message_mime("[email protected]", "AAA")
assert response.status_code == 404
assert response.content["displayName"] == "Tester"
get_mock.return_value = generate_response(content, 300)
response = MS_graph_helper.get_message_mime("[email protected]", "AAA")
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.post')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_move_message(self, authenticate_mock, post_mock):
""" Test Move Message"""
print("Test Move Message\n")
content = {"displayName": "Tester"}
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
post_mock.return_value = generate_response(content, 201)
response = MS_graph_helper.move_message("[email protected]", None, "AAA", {'name': "recoverableitemsdeletions"})
assert response.status_code == 201
assert response.content["displayName"] == "Tester"
post_mock.return_value = generate_response(content, 404)
response = MS_graph_helper.move_message("[email protected]", None, "AAA", {'name': "recoverableitemsdeletions"})
assert response.status_code == 404
assert response.content["displayName"] == "Tester"
post_mock.return_value = generate_response(content, 300)
response = MS_graph_helper.move_message("[email protected]", None, "AAA", {'name': "recoverableitemsdeletions"})
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_get_users(self, authenticate_mock, get_mock):
""" Test Get User"""
print("Test Get Users\n")
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
content = {
'value': [{'userPrincipalName': '[email protected]'}, {'userPrincipalName': '[email protected]'}]}
get_mock.return_value = generate_response(content, 200)
user_list = MS_graph_helper.get_users()
assert len(user_list) == 2
assert user_list[0]['userPrincipalName'] == '[email protected]'
assert user_list[1]['userPrincipalName'] == '[email protected]'
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_query_messages_all_users(self, authenticate_mock, mocked_get):
""" Test Get User"""
print("Test Query Messages All Users\n")
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
# Mock the users
content1 = {
'value': [{'userPrincipalName': '[email protected]'}, {'userPrincipalName': '[email protected]'}]}
# Mock the email lists for user 1
content2 = {'value': [{'id': 'AAA'}, {'id': 'BBB'}]}
# Mock the email lists for user 2
content3 = {'value': [{'id': 'CCC'}]}
mocked_get.side_effect = [generate_response(content1, 200),
generate_response(content2, 200),
generate_response(content3, 200)]
email_list = MS_graph_helper.query_messages("all", None, None, None, None, None, "lunch", None)
assert len(email_list) == 2
assert email_list[0]['email_address'] == '[email protected]'
assert email_list[0]['status_code'] == 200
assert email_list[0]['email_list'][0]['id'] == 'AAA'
assert email_list[0]['email_list'][1]['id'] == 'BBB'
assert email_list[1]['email_address'] == '[email protected]'
assert email_list[1]['status_code'] == 200
assert email_list[1]['email_list'][0]['id'] == 'CCC'
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.get')
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_query_messages(self, authenticate_mock, mocked_get):
""" Test Get User"""
print("Test Query Messages Single User\n")
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
# Mock the email lists for user 1
content1 = {'value': [{'id': 'AAA'}, {'id': 'BBB'}]}
mocked_get.side_effect = [generate_response(content1, 200)]
result_list = MS_graph_helper.query_messages("[email protected]", None, "[email protected]", None,
None, None, "lunch", None)
assert len(result_list) == 1
assert result_list[0]['email_address'] == '[email protected]'
assert result_list[0]['status_code'] == 200
assert result_list[0]['email_list'][0]['id'] == 'AAA'
assert result_list[0]['email_list'][1]['id'] == 'BBB'
mocked_get.side_effect = [generate_response(content1, 404)]
result_list = MS_graph_helper.query_messages("[email protected]", None, None, None, None, None, "lunch",
None)
assert len(result_list) == 1
assert result_list[0]['status_code'] == 404
assert len(result_list[0]['email_list']) == 0
mocked_get.side_effect = [generate_response(content1, 300)]
result_list = MS_graph_helper.query_messages("[email protected]", None, None, None, None, None, "lunch",
None)
except IntegrationError as err:
assert True
@patch('fn_exchange_online.lib.ms_graph_helper.OAuth2ClientCredentialsSession.authenticate')
def test_build_query_url(self, authenticate_mock):
""" Test Build Query URL"""
print("Test build MS Graph Query URL\n")
try:
authenticate_mock.return_value = True
MS_graph_helper = MSGraphHelper(MOCKED_OPTS.get("microsoft_graph_token_url"),
MOCKED_OPTS.get("microsoft_graph_url"),
MOCKED_OPTS.get("tenant_id"),
MOCKED_OPTS.get("client_id"),
MOCKED_OPTS.get("client_secret"),
MOCKED_OPTS.get("max_messages"),
MOCKED_OPTS.get("max_users"),
None)
# Param list: email_address, mail_folder, sender, start_date, end_date, has_attachments, message_subject,
# message_body
# Test sender, hasAttachments, message subject
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, "[email protected]", None,
None, True, "lunch", None)
assert url == u'microsoft_graph_url/users/[email protected]/messages?$filter=(from/emailAddress/address%20eq%20\'[email protected]\')%20and%20(hasAttachments%20eq%20true)%20and%20(contains(subject,\'lunch\'))'
# Test $search in query (message body)
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, "[email protected]", None,
None, True, None, "lunch")
assert url == u'microsoft_graph_url/users/[email protected]/messages?$search="lunch"&?$filter=(from/emailAddress/address%20eq%20\'[email protected]\')%20and%20(hasAttachments%20eq%20true)'
# Test query: sender, start date, hasAttachments
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, "[email protected]",
1577854800000, None, True, None, None)
#assert url == u'microsoft_graph_url/users/[email protected]/messages?$filter=(receivedDateTime%20ge%202020-01-01T00:00:00Z)%20and%20(from/emailAddress/address%20eq%20"[email protected]")%20and%20(hasAttachments%20eq%20true)'
# Test $search in query (sender, start date, hasAttachments)
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, "[email protected]",
1577854800000, None, None, None, "lunch")
#assert url == u'microsoft_graph_url/users/[email protected]/messages?$search="lunch"&?$filter=(receivedDateTime%20ge%202020-01-01T00:00:00Z)%20and%20(from/emailAddress/address%20eq%20"[email protected]")'
# Test $search in query sender, start and end date
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, "[email protected]",
1577854800000, 1577895870000, None, None, "lunch")
#assert url == u'microsoft_graph_url/users/[email protected]/messages?$search="lunch"&?$filter=(receivedDateTime%20ge%202020-01-01T00:00:00Z)%20and%20(receivedDateTime%20le%202020-01-01T11:24:30Z)%20and%20(from/emailAddress/address%20eq%20"[email protected]")'
# No query parameters will cause IntegrationError
url = MS_graph_helper.build_MS_graph_query_url("[email protected]", None, None, None, None, None, None,
None)
except IntegrationError as err:
assert True
|
import argparse
import logging
import os
import pickle
import sys
import numpy as np
import kb
import template_builder
import utils
def get_input(fact, y, template_obj_list,add_ids):
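    # Build one training row: optionally the raw (subject, relation, object) ids,
    # then the features contributed by each template, and finally the label y.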
if (add_ids):
x = [fact[0],fact[1],fact[2]]
else:
x = []
for template in template_obj_list:
x.extend(template.get_input(fact))
x.append(y)
return x
def preprocess(kb, template_obj_list, negative_count,add_ids,y_labels):
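    # Every positive fact keeps its y label; in addition, `negative_count` corrupted
    # facts with label 0 are created by replacing the subject (ns) or the object (no)
    # with a random entity id.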
new_facts = []
ctr = 0
for facts in kb.facts:
if(ctr % 500 == 0):
logging.info("Processed {0} facts out of {1}".format(
ctr, len(kb.facts)))
ns = np.random.randint(0, len(kb.entity_map), negative_count)
no = np.random.randint(0, len(kb.entity_map), negative_count)
new_facts.append(get_input(facts, y_labels[ctr], template_obj_list,add_ids))
for neg_facts in range(negative_count):
new_fact = (ns[neg_facts], facts[1], facts[2])
new_facts.append(get_input(new_fact, 0, template_obj_list,add_ids))
new_fact = (facts[0], facts[1], no[neg_facts])
new_facts.append(get_input(new_fact, 0, template_obj_list,add_ids))
ctr += 1
return np.array(new_facts)
def write_to_file(facts, fileprefix):
with open(fileprefix+".pkl", "wb") as f:
pickle.dump(facts, f)
logging.info("Written data to {0}".format(fileprefix+".txt"))
np.savetxt(fileprefix+".txt", facts, delimiter=',',fmt='%.6e')
logging.info("Written data to {0}".format(fileprefix+".pkl"))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
'-d', '--dataset', help="Name of the dataset as in data folder", required=True)
parser.add_argument(
'-m', '--model_type', help="model name. Can be distmult or complex ", required=True)
parser.add_argument('-f', '--preprocess_file',
required=True, help="Path of the file which is to be preprocessed")
parser.add_argument('-y', '--y_labels',
required=False, help="Path of the y label file, which has same number of lines as preprocess_file. Use it to generate test or valid data, which has y labels instead of 1 and 0 in last column",default='')
parser.add_argument('-s', '--sm_data_write',
required=True, default="selection_module.data")
parser.add_argument('-w', '--model_weights',
help="Pickle file of model wieghts", required=True)
parser.add_argument('-l', '--template_load_dir',
required=False, default=None)
parser.add_argument('-v', '--oov_entity', required=False, default=True)
parser.add_argument('--t_ids', nargs='+', type=int, required=True,
help='List of templates to run for')
parser.add_argument('--del_ids', action='store_true', required=False,
                        help='Use this flag to omit the entity and relation ids at the start of each row.\nDefault behaviour is to add the ids in front of each record.')
parser.add_argument('--data_repo_root',
required=False, default='data')
parser.add_argument('--negative_count',
required=False, type=int,default=2)
parser.add_argument('--log_level',
default='INFO',
dest='log_level',
type=utils._log_level_string_to_int,
nargs='?',
help='Set the logging output level. {0}'.format(utils._LOG_LEVEL_STRINGS))
args = parser.parse_args()
logging.basicConfig(format='%(levelname)s :: %(asctime)s - %(message)s',
level=args.log_level, datefmt='%d/%m/%Y %I:%M:%S %p')
if(args.y_labels != '' and args.negative_count!=0):
        logging.error('Cannot generate random negative samples together with y labels. When using --y_labels, also pass --negative_count 0')
exit(-1)
dataset_root = os.path.join(args.data_repo_root, args.dataset)
template_objs = template_builder.template_obj_builder(dataset_root, args.model_weights,args.template_load_dir,None, args.model_type, args.t_ids, args.oov_entity)
ktrain = template_objs[0].kb
k_preprocess = kb.KnowledgeBase(args.preprocess_file, ktrain.entity_map, ktrain.relation_map,add_unknowns=not args.oov_entity)
y_labels = [1 for _ in range(k_preprocess.facts.shape[0])]
if(args.y_labels != ''):
#y_labels = np.loadtxt(args.y_labels)
y_labels,y_multilabels = utils.read_multilabel(args.y_labels)
if(y_labels.shape[0] != k_preprocess.facts.shape[0]):
logging.error('Number of facts and their y labels do not match')
exit(-1)
new_facts = preprocess(k_preprocess, template_objs, args.negative_count, not args.del_ids, y_labels)
write_to_file(new_facts, args.sm_data_write)
|
from __future__ import print_function
import argparse
import collections
import json
import logging
import os
import random
import time
from abc import abstractmethod
from BCBio import GFF
from Bio import SeqIO
import requests
from six.moves.builtins import next
from six.moves.builtins import object
from six.moves.builtins import str
try:
import StringIO as io
except BaseException:
import io
logging.getLogger("requests").setLevel(logging.CRITICAL)
log = logging.getLogger()
#############################################
# BEGIN IMPORT OF CACHING LIBRARY #
#############################################
# This code is licensed under the MIT #
# License and is a copy of code publicly #
# available in rev. #
# e27332bc82f4e327aedaec17c9b656ae719322ed #
# of https://github.com/tkem/cachetools/ #
#############################################
class DefaultMapping(collections.MutableMapping):
__slots__ = ()
@abstractmethod
def __contains__(self, key): # pragma: nocover
return False
@abstractmethod
def __getitem__(self, key): # pragma: nocover
if hasattr(self.__class__, '__missing__'):
return self.__class__.__missing__(self, key)
else:
raise KeyError(key)
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
__marker = object()
def pop(self, key, default=__marker):
if key in self:
value = self[key]
del self[key]
elif default is self.__marker:
raise KeyError(key)
else:
value = default
return value
def setdefault(self, key, default=None):
if key in self:
value = self[key]
else:
self[key] = value = default
return value
DefaultMapping.register(dict)
class _DefaultSize(object):
def __getitem__(self, _):
return 1
def __setitem__(self, _, value):
assert value == 1
def pop(self, _):
return 1
class Cache(DefaultMapping):
"""Mutable mapping to serve as a simple cache or cache base class."""
__size = _DefaultSize()
def __init__(self, maxsize, missing=None, getsizeof=None):
if missing:
self.__missing = missing
if getsizeof:
self.__getsizeof = getsizeof
self.__size = dict()
self.__data = dict()
self.__currsize = 0
self.__maxsize = maxsize
def __repr__(self):
return '%s(%r, maxsize=%r, currsize=%r)' % (
self.__class__.__name__,
list(self.__data.items()),
self.__maxsize,
self.__currsize,
)
def __getitem__(self, key):
try:
return self.__data[key]
except KeyError:
return self.__missing__(key)
def __setitem__(self, key, value):
maxsize = self.__maxsize
size = self.getsizeof(value)
if size > maxsize:
raise ValueError('value too large')
if key not in self.__data or self.__size[key] < size:
while self.__currsize + size > maxsize:
self.popitem()
if key in self.__data:
diffsize = size - self.__size[key]
else:
diffsize = size
self.__data[key] = value
self.__size[key] = size
self.__currsize += diffsize
def __delitem__(self, key):
size = self.__size.pop(key)
del self.__data[key]
self.__currsize -= size
def __contains__(self, key):
return key in self.__data
def __missing__(self, key):
value = self.__missing(key)
try:
self.__setitem__(key, value)
except ValueError:
pass # value too large
return value
def __iter__(self):
return iter(self.__data)
def __len__(self):
return len(self.__data)
@staticmethod
def __getsizeof(value):
return 1
@staticmethod
def __missing(key):
raise KeyError(key)
@property
def maxsize(self):
"""The maximum size of the cache."""
return self.__maxsize
@property
def currsize(self):
"""The current size of the cache."""
return self.__currsize
def getsizeof(self, value):
"""Return the size of a cache element's value."""
return self.__getsizeof(value)
class _Link(object):
__slots__ = ('key', 'expire', 'next', 'prev')
def __init__(self, key=None, expire=None):
self.key = key
self.expire = expire
def __reduce__(self):
return _Link, (self.key, self.expire)
def unlink(self):
next = self.next
prev = self.prev
prev.next = next
next.prev = prev
class _Timer(object):
def __init__(self, timer):
self.__timer = timer
self.__nesting = 0
def __call__(self):
if self.__nesting == 0:
return self.__timer()
else:
return self.__time
def __enter__(self):
if self.__nesting == 0:
self.__time = time = self.__timer()
else:
time = self.__time
self.__nesting += 1
return time
def __exit__(self, *exc):
self.__nesting -= 1
def __reduce__(self):
return _Timer, (self.__timer,)
def __getattr__(self, name):
return getattr(self.__timer, name)
class TTLCache(Cache):
"""LRU Cache implementation with per-item time-to-live (TTL) value."""
def __init__(self, maxsize, ttl, timer=time.time, missing=None,
getsizeof=None):
Cache.__init__(self, maxsize, missing, getsizeof)
self.__root = root = _Link()
root.prev = root.next = root
self.__links = collections.OrderedDict()
self.__timer = _Timer(timer)
self.__ttl = ttl
def __contains__(self, key):
try:
link = self.__links[key] # no reordering
except KeyError:
return False
else:
return not (link.expire < self.__timer())
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
try:
link = self.__getlink(key)
except KeyError:
expired = False
else:
expired = link.expire < self.__timer()
if expired:
return self.__missing__(key)
else:
return cache_getitem(self, key)
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
with self.__timer as time:
self.expire(time)
cache_setitem(self, key, value)
try:
link = self.__getlink(key)
except KeyError:
self.__links[key] = link = _Link(key)
else:
link.unlink()
link.expire = time + self.__ttl
link.next = root = self.__root
link.prev = prev = root.prev
prev.next = root.prev = link
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
cache_delitem(self, key)
link = self.__links.pop(key)
link.unlink()
if link.expire < self.__timer():
raise KeyError(key)
def __iter__(self):
root = self.__root
curr = root.next
while curr is not root:
# "freeze" time for iterator access
with self.__timer as time:
if not (curr.expire < time):
yield curr.key
curr = curr.next
def __len__(self):
root = self.__root
curr = root.next
time = self.__timer()
count = len(self.__links)
while curr is not root and curr.expire < time:
count -= 1
curr = curr.next
return count
def __setstate__(self, state):
self.__dict__.update(state)
root = self.__root
root.prev = root.next = root
for link in sorted(self.__links.values(), key=lambda obj: obj.expire):
link.next = root
link.prev = prev = root.prev
prev.next = root.prev = link
self.expire(self.__timer())
def __repr__(self, cache_repr=Cache.__repr__):
with self.__timer as time:
self.expire(time)
return cache_repr(self)
@property
def currsize(self):
with self.__timer as time:
self.expire(time)
return super(TTLCache, self).currsize
@property
def timer(self):
"""The timer function used by the cache."""
return self.__timer
@property
def ttl(self):
"""The time-to-live value of the cache's items."""
return self.__ttl
def expire(self, time=None):
"""Remove expired items from the cache."""
if time is None:
time = self.__timer()
root = self.__root
curr = root.next
links = self.__links
cache_delitem = Cache.__delitem__
while curr is not root and curr.expire < time:
cache_delitem(self, curr.key)
del links[curr.key]
next = curr.next
curr.unlink()
curr = next
def clear(self):
with self.__timer as time:
self.expire(time)
Cache.clear(self)
def get(self, *args, **kwargs):
with self.__timer:
return Cache.get(self, *args, **kwargs)
def pop(self, *args, **kwargs):
with self.__timer:
return Cache.pop(self, *args, **kwargs)
def setdefault(self, *args, **kwargs):
with self.__timer:
return Cache.setdefault(self, *args, **kwargs)
def popitem(self):
"""Remove and return the `(key, value)` pair least recently used that
has not already expired.
"""
with self.__timer as time:
self.expire(time)
try:
key = next(iter(self.__links))
except StopIteration:
raise KeyError('%s is empty' % self.__class__.__name__)
else:
return (key, self.pop(key))
if hasattr(collections.OrderedDict, 'move_to_end'):
def __getlink(self, key):
value = self.__links[key]
self.__links.move_to_end(key)
return value
else:
def __getlink(self, key):
value = self.__links.pop(key)
self.__links[key] = value
return value
#############################################
# END IMPORT OF CACHING LIBRARY #
#############################################
cache = TTLCache(
100, # Up to 100 items
5 * 60 # 5 minute cache life
)
userCache = TTLCache(
2, # Up to 2 items
60 # 1 minute cache life
)
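# 'cache' holds the per-email organism/group/user listings built for the Galaxy
# dropdowns (keys like 'orgs-<email>'); 'userCache' holds the full Apollo user
# list under the single key 'user-list' (see WebApolloInstance.requireUser).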
class UnknownUserException(Exception):
pass
def WAAuth(parser):
parser.add_argument('apollo', help='Complete Apollo URL')
parser.add_argument('username', help='WA Username')
parser.add_argument('password', help='WA Password')
def OrgOrGuess(parser):
parser.add_argument('--org_json', type=argparse.FileType("r"), help='Apollo JSON output, source for common name')
parser.add_argument('--org_raw', help='Common Name')
parser.add_argument('--org_id', help='Organism ID')
def CnOrGuess(parser):
OrgOrGuess(parser)
parser.add_argument('--seq_fasta', type=argparse.FileType("r"), help='Fasta file, IDs used as sequence sources')
parser.add_argument('--seq_raw', nargs='*', help='Sequence Names')
def GuessOrg(args, wa):
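    # Resolution order for the organism common name: Apollo JSON output first,
    # then a raw common name, then a lookup by organism ID.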
if args.org_json:
orgs = [x.get('commonName', None)
for x in json.load(args.org_json)]
orgs = [x for x in orgs if x is not None]
return orgs
elif args.org_raw:
org = args.org_raw.strip()
if len(org) > 0:
return [org]
else:
raise Exception("Organism Common Name not provided")
elif args.org_id:
return [wa.organisms.findOrganismById(args.org_id).get('commonName', None)]
else:
raise Exception("Organism Common Name not provided")
def GuessCn(args, wa):
org = GuessOrg(args, wa)
seqs = []
if args.seq_fasta:
# If we have a fasta, pull all rec ids from that.
for rec in SeqIO.parse(args.seq_fasta, 'fasta'):
seqs.append(rec.id)
elif args.seq_raw:
# Otherwise raw list.
seqs = [x.strip() for x in args.seq_raw if len(x.strip()) > 0]
return org, seqs
def AssertUser(user_list):
if len(user_list) == 0:
raise UnknownUserException()
elif len(user_list) == 1:
return user_list[0]
else:
raise Exception("Too many users!")
def AssertAdmin(user):
if user.role == 'ADMIN':
return True
else:
raise Exception("User is not an administrator. Permission denied")
def PermissionCheck(user, org_cn, permission_type):
return any(org["organism"] == org_cn and permission_type in org["permissions"] for org in user.organismPermissions)
def PasswordGenerator(length):
chars = list('qwrtpsdfghjklzxcvbnm')
return ''.join(random.choice(chars) for _ in range(length))
def IsRemoteUser():
if 'GALAXY_WEBAPOLLO_REMOTE_USER' not in os.environ:
return False
value = os.environ['GALAXY_WEBAPOLLO_REMOTE_USER']
if value.lower() in ('true', 't', '1'):
return True
else:
return False
class WebApolloInstance(object):
def __init__(self, url, username, password):
self.apollo_url = url
self.username = username
self.password = password
self.annotations = AnnotationsClient(self)
self.groups = GroupsClient(self)
self.io = IOClient(self)
self.organisms = OrganismsClient(self)
self.users = UsersClient(self)
self.metrics = MetricsClient(self)
self.bio = RemoteRecord(self)
self.status = StatusClient(self)
self.canned_comments = CannedCommentsClient(self)
self.canned_keys = CannedKeysClient(self)
self.canned_values = CannedValuesClient(self)
def __str__(self):
return '<WebApolloInstance at %s>' % self.apollo_url
def requireUser(self, email):
cacheKey = 'user-list'
try:
# Get the cached value
data = userCache[cacheKey]
except KeyError:
# If we hit a key error above, indicating that
# we couldn't find the key, we'll simply re-request
# the data
data = self.users.loadUsers()
userCache[cacheKey] = data
return AssertUser([x for x in data if x.username == email])
class GroupObj(object):
def __init__(self, **kwargs):
self.name = kwargs['name']
if 'id' in kwargs:
self.groupId = kwargs['id']
class UserObj(object):
ROLE_USER = 'USER'
ROLE_ADMIN = 'ADMIN'
def __init__(self, **kwargs):
# Generally expect 'userId', 'firstName', 'lastName', 'username' (email)
for attr in kwargs.keys():
setattr(self, attr, kwargs[attr])
if 'groups' in kwargs:
groups = []
for groupData in kwargs['groups']:
groups.append(GroupObj(**groupData))
self.groups = groups
self.__props = kwargs.keys()
def isAdmin(self):
if hasattr(self, 'role'):
return self.role == self.ROLE_ADMIN
return False
def refresh(self, wa):
# This method requires some sleeping usually.
newU = wa.users.loadUser(self).toDict()
for prop in newU:
setattr(self, prop, newU[prop])
def toDict(self):
data = {}
for prop in self.__props:
data[prop] = getattr(self, prop)
return data
def orgPerms(self):
for orgPer in self.organismPermissions:
if len(orgPer['permissions']) > 2:
orgPer['permissions'] = json.loads(orgPer['permissions'])
yield orgPer
def __str__(self):
return '<User %s: %s %s <%s>>' % (self.userId, self.firstName,
self.lastName, self.username)
class Client(object):
def __init__(self, webapolloinstance, **requestArgs):
self._wa = webapolloinstance
self.__verify = requestArgs.get('verify', True)
self._requestArgs = requestArgs
if 'verify' in self._requestArgs:
del self._requestArgs['verify']
def request(self, clientMethod, data, post_params={}, isJson=True):
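        # Apollo expects the username/password credentials inside the JSON body;
        # they are added to the payload here and stripped from the decoded
        # response before it is returned.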
url = self._wa.apollo_url + self.CLIENT_BASE + clientMethod
headers = {
'Content-Type': 'application/json'
}
data.update({
'username': self._wa.username,
'password': self._wa.password,
})
r = requests.post(url, data=json.dumps(data), headers=headers,
verify=self.__verify, params=post_params, allow_redirects=False, **self._requestArgs)
if r.status_code == 200 or r.status_code == 302:
if isJson:
d = r.json()
if 'username' in d:
del d['username']
if 'password' in d:
del d['password']
return d
else:
return r.text
# @see self.body for HTTP response body
raise Exception("Unexpected response from apollo %s: %s" %
(r.status_code, r.text))
def get(self, clientMethod, get_params):
url = self._wa.apollo_url + self.CLIENT_BASE + clientMethod
headers = {}
r = requests.get(url, headers=headers, verify=self.__verify,
params=get_params, **self._requestArgs)
if r.status_code == 200:
d = r.json()
if 'username' in d:
del d['username']
if 'password' in d:
del d['password']
return d
# @see self.body for HTTP response body
raise Exception("Unexpected response from apollo %s: %s" %
(r.status_code, r.text))
class MetricsClient(Client):
CLIENT_BASE = '/metrics/'
def getServerMetrics(self):
return self.get('metrics', {})
class AnnotationsClient(Client):
CLIENT_BASE = '/annotationEditor/'
def _update_data(self, data):
if not hasattr(self, '_extra_data'):
raise Exception("Please call setSequence first")
data.update(self._extra_data)
return data
def setSequence(self, sequence, organism):
self._extra_data = {
'sequence': sequence,
'organism': organism,
}
def setDescription(self, featureDescriptions):
data = {
'features': featureDescriptions,
}
data = self._update_data(data)
return self.request('setDescription', data)
def setName(self, uniquename, name):
# TODO
data = {
'features': [
{
'uniquename': uniquename,
'name': name,
}
],
}
data = self._update_data(data)
return self.request('setName', data)
def setNames(self, features):
# TODO
data = {
'features': features,
}
data = self._update_data(data)
return self.request('setName', data)
def setStatus(self, statuses):
# TODO
data = {
'features': statuses,
}
data = self._update_data(data)
return self.request('setStatus', data)
def setSymbol(self, symbols):
data = {
'features': symbols,
}
data.update(self._extra_data)
return self.request('setSymbol', data)
def getComments(self, feature_id):
data = {
'features': [{'uniquename': feature_id}],
}
data = self._update_data(data)
return self.request('getComments', data)
def addComments(self, feature_id, comments):
# TODO: This is probably not great and will delete comments, if I had to guess...
data = {
'features': [
{
'uniquename': feature_id,
'comments': comments
}
],
}
data = self._update_data(data)
return self.request('addComments', data)
def addAttributes(self, feature_id, attributes):
nrps = []
for (key, values) in attributes.items():
for value in values:
nrps.append({
'tag': key,
'value': value
})
data = {
'features': [
{
'uniquename': feature_id,
'non_reserved_properties': nrps
}
]
}
data = self._update_data(data)
return self.request('addAttribute', data)
def deleteAttribute(self, feature_id, key, value):
data = {
'features': [
{
'uniquename': feature_id,
'non_reserved_properties': [
{'tag': key, 'value': value}
]
}
]
}
data = self._update_data(data)
return self.request('addAttribute', data)
def getFeatures(self):
data = self._update_data({})
return self.request('getFeatures', data)
def getSequence(self, uniquename):
data = {
'features': [
{'uniquename': uniquename}
]
}
data = self._update_data(data)
return self.request('getSequence', data)
def addFeature(self, feature, trustme=False):
if not trustme:
raise NotImplementedError("Waiting on better docs from project. If you know what you are doing, pass trustme=True to this function.")
data = {
'features': feature,
}
data = self._update_data(data)
return self.request('addFeature', data)
def addTranscript(self, transcript, trustme=False):
if not trustme:
raise NotImplementedError("Waiting on better docs from project. If you know what you are doing, pass trustme=True to this function.")
data = {}
data.update(transcript)
data = self._update_data(data)
return self.request('addTranscript', data)
# addExon, add/delete/updateComments, addTranscript skipped due to docs
def duplicateTranscript(self, transcriptId):
data = {
'features': [{'uniquename': transcriptId}]
}
data = self._update_data(data)
return self.request('duplicateTranscript', data)
def setTranslationStart(self, uniquename, start):
data = {
'features': [{
'uniquename': uniquename,
'location': {
'fmin': start
}
}]
}
data = self._update_data(data)
return self.request('setTranslationStart', data)
def setTranslationEnd(self, uniquename, end):
data = {
'features': [{
'uniquename': uniquename,
'location': {
'fmax': end
}
}]
}
data = self._update_data(data)
return self.request('setTranslationEnd', data)
def setLongestOrf(self, uniquename):
data = {
'features': [{
'uniquename': uniquename,
}]
}
data = self._update_data(data)
return self.request('setLongestOrf', data)
def setBoundaries(self, uniquename, start, end):
data = {
'features': [{
'uniquename': uniquename,
'location': {
'fmin': start,
'fmax': end,
}
}]
}
data = self._update_data(data)
return self.request('setBoundaries', data)
def getSequenceAlterations(self):
data = {
}
data = self._update_data(data)
return self.request('getSequenceAlterations', data)
def setReadthroughStopCodon(self, uniquename):
data = {
'features': [{
'uniquename': uniquename,
}]
}
data = self._update_data(data)
return self.request('setReadthroughStopCodon', data)
def deleteSequenceAlteration(self, uniquename):
data = {
'features': [{
'uniquename': uniquename,
}]
}
data = self._update_data(data)
return self.request('deleteSequenceAlteration', data)
def flipStrand(self, uniquenames):
data = {
'features': [
{'uniquename': x} for x in uniquenames
]
}
data = self._update_data(data)
return self.request('flipStrand', data)
def mergeExons(self, exonA, exonB):
data = {
'features': [
{'uniquename': exonA},
{'uniquename': exonB},
]
}
data = self._update_data(data)
return self.request('mergeExons', data)
# def splitExon(): pass
def deleteFeatures(self, uniquenames):
assert isinstance(uniquenames, collections.Iterable)
data = {
'features': [
{'uniquename': x} for x in uniquenames
]
}
data = self._update_data(data)
return self.request('deleteFeature', data)
# def deleteExon(): pass
# def makeIntron(self, uniquename, ): pass
def getSequenceSearchTools(self):
return self.get('getSequenceSearchTools', {})
def getCannedComments(self):
return self.get('getCannedComments', {})
def searchSequence(self, searchTool, sequence, database):
data = {
'key': searchTool,
'residues': sequence,
'database_id': database,
}
return self.request('searchSequences', data)
def getGff3(self, uniquenames):
assert isinstance(uniquenames, collections.Iterable)
data = {
'features': [
{'uniquename': x} for x in uniquenames
]
}
data = self._update_data(data)
return self.request('getGff3', data, isJson=False)
class GroupsClient(Client):
CLIENT_BASE = '/group/'
def createGroup(self, name):
data = {'name': name}
return self.request('createGroup', data)
def getOrganismPermissionsForGroup(self, group):
data = {
'id': group.groupId,
'name': group.name,
}
return self.request('getOrganismPermissionsForGroup', data)
def loadGroup(self, group):
return self.loadGroupById(group.groupId)
def loadGroupById(self, groupId):
res = self.request('loadGroups', {'groupId': groupId})
if isinstance(res, list):
# We can only match one, right?
return GroupObj(**res[0])
else:
return res
def loadGroupByName(self, name):
res = self.request('loadGroups', {'name': name})
if isinstance(res, list):
# We can only match one, right?
return GroupObj(**res[0])
else:
return res
def loadGroups(self, group=None):
res = self.request('loadGroups', {})
data = [GroupObj(**x) for x in res]
if group is not None:
data = [x for x in data if x.name == group]
return data
def deleteGroup(self, group):
data = {
'id': group.groupId,
'name': group.name,
}
return self.request('deleteGroup', data)
def updateGroup(self, group, newName):
# TODO: Sure would be nice if modifying ``group.name`` would invoke
# this?
data = {
'id': group.groupId,
'name': newName,
}
return self.request('updateGroup', data)
def updateOrganismPermission(self, group, organismName,
administrate=False, write=False, read=False,
export=False):
data = {
'groupId': group.groupId,
'organism': organismName,
'ADMINISTRATE': administrate,
'WRITE': write,
'EXPORT': export,
'READ': read,
}
return self.request('updateOrganismPermission', data)
def updateMembership(self, group, users):
data = {
'groupId': group.groupId,
'user': [user.email for user in users]
}
return self.request('updateMembership', data)
class IOClient(Client):
CLIENT_BASE = '/IOService/'
def write(self, exportType='FASTA', seqType='peptide',
exportFormat='text', sequences=None, organism=None,
output='text', exportAllSequences=False,
exportGff3Fasta=False):
if exportType not in ('FASTA', 'GFF3'):
raise Exception("exportType must be one of FASTA, GFF3")
if seqType not in ('peptide', 'cds', 'cdna', 'genomic'):
raise Exception("seqType must be one of peptide, cds, dna, genomic")
if exportFormat not in ('gzip', 'text'):
raise Exception("exportFormat must be one of gzip, text")
if output not in ('file', 'text'):
raise Exception("output must be one of file, text")
data = {
'type': exportType,
'seqType': seqType,
'format': exportFormat,
'sequences': sequences,
'organism': organism,
'output': output,
'exportAllSequences': exportAllSequences,
'exportGff3Fasta': exportGff3Fasta,
}
return self.request('write', data, isJson=output == 'file')
def download(self, uuid, outputFormat='gzip'):
if outputFormat.lower() not in ('gzip', 'text'):
raise Exception("outputFormat must be one of file, text")
data = {
'format': outputFormat,
'uuid': uuid,
}
return self.request('write', data)
class StatusClient(Client):
CLIENT_BASE = '/availableStatus/'
def addStatus(self, value):
data = {
'value': value
}
return self.request('createStatus', data)
def findAllStatuses(self):
return self.request('showStatus', {})
def findStatusByValue(self, value):
statuses = self.findAllStatuses()
statuses = [x for x in statuses if x['value'] == value]
if len(statuses) == 0:
raise Exception("Unknown status value")
else:
return statuses[0]
def findStatusById(self, id_number):
statuses = self.findAllStatuses()
statuses = [x for x in statuses if str(x['id']) == str(id_number)]
if len(statuses) == 0:
raise Exception("Unknown ID")
else:
return statuses[0]
def updateStatus(self, id_number, new_value):
data = {
'id': id_number,
'new_value': new_value
}
return self.request('updateStatus', data)
def deleteStatus(self, id_number):
data = {
'id': id_number
}
return self.request('deleteStatus', data)
class CannedCommentsClient(Client):
CLIENT_BASE = '/cannedComment/'
def addComment(self, comment, metadata=""):
data = {
'comment': comment,
'metadata': metadata
}
return self.request('createComment', data)
def findAllComments(self):
return self.request('showComment', {})
def findCommentByValue(self, value):
comments = self.findAllComments()
comments = [x for x in comments if x['comment'] == value]
if len(comments) == 0:
raise Exception("Unknown comment")
else:
return comments[0]
def findCommentById(self, id_number):
comments = self.findAllComments()
comments = [x for x in comments if str(x['id']) == str(id_number)]
if len(comments) == 0:
raise Exception("Unknown ID")
else:
return comments[0]
def updateComment(self, id_number, new_value, metadata=None):
data = {
'id': id_number,
'new_comment': new_value
}
if metadata is not None:
data['metadata'] = metadata
return self.request('updateComment', data)
def deleteComment(self, id_number):
data = {
'id': id_number
}
return self.request('deleteComment', data)
class CannedKeysClient(Client):
CLIENT_BASE = '/cannedKey/'
def addKey(self, key, metadata=""):
data = {
'key': key,
'metadata': metadata
}
return self.request('createKey', data)
def findAllKeys(self):
return self.request('showKey', {})
def findKeyByValue(self, value):
keys = self.findAllKeys()
keys = [x for x in keys if x['label'] == value]
if len(keys) == 0:
raise Exception("Unknown key")
else:
return keys[0]
def findKeyById(self, id_number):
keys = self.findAllKeys()
keys = [x for x in keys if str(x['id']) == str(id_number)]
if len(keys) == 0:
raise Exception("Unknown ID")
else:
return keys[0]
def updateKey(self, id_number, new_key, metadata=None):
data = {
'id': id_number,
'new_key': new_key
}
if metadata is not None:
data['metadata'] = metadata
return self.request('updateKey', data)
def deleteKey(self, id_number):
data = {
'id': id_number
}
return self.request('deleteKey', data)
class CannedValuesClient(Client):
CLIENT_BASE = '/cannedValue/'
def addValue(self, value, metadata=""):
data = {
'value': value,
'metadata': metadata
}
return self.request('createValue', data)
def findAllValues(self):
return self.request('showValue', {})
def findValueByValue(self, value):
values = self.findAllValues()
values = [x for x in values if x['label'] == value]
if len(values) == 0:
raise Exception("Unknown value")
else:
return values[0]
def findValueById(self, id_number):
values = self.findAllValues()
values = [x for x in values if str(x['id']) == str(id_number)]
if len(values) == 0:
raise Exception("Unknown ID")
else:
return values[0]
def updateValue(self, id_number, new_value, metadata=None):
data = {
'id': id_number,
'new_value': new_value
}
if metadata is not None:
data['metadata'] = metadata
return self.request('updateValue', data)
def deleteValue(self, id_number):
data = {
'id': id_number
}
return self.request('deleteValue', data)
class OrganismsClient(Client):
CLIENT_BASE = '/organism/'
def addOrganism(self, commonName, directory, blatdb=None, species=None,
genus=None, public=False):
data = {
'commonName': commonName,
'directory': directory,
'publicMode': public,
}
if blatdb is not None:
data['blatdb'] = blatdb
if genus is not None:
data['genus'] = genus
if species is not None:
data['species'] = species
return self.request('addOrganism', data)
def findAllOrganisms(self):
orgs = self.request('findAllOrganisms', {})
if not isinstance(orgs, (list,)):
orgs = []
return orgs
def findOrganismByCn(self, cn):
orgs = self.findAllOrganisms()
orgs = [x for x in orgs if x['commonName'] == cn]
if len(orgs) == 0:
raise Exception("Unknown common name")
else:
return orgs[0]
def findOrganismById(self, id_number):
orgs = self.findAllOrganisms()
orgs = [x for x in orgs if str(x['id']) == str(id_number)]
if len(orgs) == 0:
raise Exception("Unknown ID")
else:
return orgs[0]
def deleteOrganism(self, organismId):
return self.request('deleteOrganism', {'id': organismId})
def deleteOrganismFeatures(self, organismId):
return self.request('deleteOrganismFeatures', {'id': organismId})
def getSequencesForOrganism(self, commonName):
return self.request('getSequencesForOrganism', {'organism': commonName})
def updateOrganismInfo(self, organismId, commonName, directory, blatdb=None, species=None, genus=None, public=False):
data = {
'id': organismId,
'name': commonName,
'directory': directory,
'publicMode': public,
}
if blatdb is not None:
data['blatdb'] = blatdb
if genus is not None:
data['genus'] = genus
if species is not None:
data['species'] = species
return self.request('updateOrganismInfo', data)
class UsersClient(Client):
CLIENT_BASE = '/user/'
# Real one
# def getOrganismPermissionsForUser(self, user):
# data = {
# 'userId': user.userId,
# }
# return self.request('getOrganismPermissionsForUser', data)
# Utter frigging hack
def getOrganismPermissionsForUser(self, user):
return self.loadUser(user).organismPermissions
def updateOrganismPermission(self, user, organism, administrate=False,
write=False, export=False, read=False):
data = {
'userId': user.userId,
'organism': organism,
'ADMINISTRATE': administrate,
'WRITE': write,
'EXPORT': export,
'READ': read,
}
return self.request('updateOrganismPermission', data)
def loadUser(self, user):
return self.loadUserById(user.userId)
def loadUserById(self, userId):
res = self.request('loadUsers', {'userId': userId})
if isinstance(res, list):
# We can only match one, right?
return UserObj(**res[0])
else:
return res
def loadUsers(self, email=None):
res = self.request('loadUsers', {})
data = [UserObj(**x) for x in res]
if email is not None:
data = [x for x in data if x.username == email]
return data
def addUserToGroup(self, group, user):
data = {'group': group.name, 'userId': user.userId}
return self.request('addUserToGroup', data)
def removeUserFromGroup(self, group, user):
data = {'group': group.name, 'userId': user.userId}
return self.request('removeUserFromGroup', data)
def createUser(self, email, firstName, lastName, newPassword, role="user", groups=None, addToHistory=False):
data = {
'firstName': firstName,
'lastName': lastName,
'email': email,
'role': role,
'groups': [] if groups is None else groups,
# 'availableGroups': [],
'newPassword': newPassword,
# 'organismPermissions': [],
}
returnData = self.request('createUser', data)
if addToHistory and not IsRemoteUser():
f = open("Apollo_credentials.txt", "w")
f.write('Username: %s\tPassword: %s' % (email, newPassword))
return returnData
def assertOrCreateUser(self, email):
try:
gx_user = AssertUser(self.loadUsers(email))
except Exception:
self.createUser(email, email, email, PasswordGenerator(12), role='user', addToHistory=True)
gx_user = AssertUser(self.loadUsers(email))
return gx_user
def deleteUser(self, user):
return self.request('deleteUser', {'userId': user.userId})
def updateUser(self, user, email, firstName, lastName, newPassword):
data = {
'userId': user.userId,
'email': email,
'firstName': firstName,
'lastName': lastName,
'newPassword': newPassword,
}
return self.request('updateUser', data)
class RemoteRecord(Client):
CLIENT_BASE = None
def ParseRecord(self, cn):
org = self._wa.organisms.findOrganismByCn(cn)
self._wa.annotations.setSequence(org['commonName'], org['id'])
data = io.StringIO(self._wa.io.write(
exportType='GFF3',
seqType='genomic',
exportAllSequences=False,
exportGff3Fasta=True,
output="text",
exportFormat="text",
sequences=cn,
))
data.seek(0)
for record in GFF.parse(data):
yield WebApolloSeqRecord(record, self._wa)
class WebApolloSeqRecord(object):
def __init__(self, sr, wa):
self._sr = sr
self._wa = wa
def __dir__(self):
return dir(self._sr)
def __getattr__(self, key):
if key in ('_sr', '_wa'):
return self.__dict__[key]
else:
if key == 'features':
return (WebApolloSeqFeature(x, self._wa)
for x in self._sr.__dict__[key])
else:
return self._sr.__dict__[key]
def __setattr__(self, key, value):
        if key in ('_sr', '_wa'):
self.__dict__[key] = value
else:
self._sr.__dict__[key] = value
# Methods acting on the SeqRecord object
class WebApolloSeqFeature(object):
def __init__(self, sf, wa):
self._sf = sf
self._wa = wa
def __dir__(self):
return dir(self._sf)
def __getattr__(self, key):
if key in ('_sf', '_wa'):
return self.__dict__[key]
else:
return self._sf.__dict__[key]
def __setattr__(self, key, value):
if key in ('_sf', '_wa'):
self.__dict__[key] = value
else:
# Methods acting on the SeqFeature object
if key == 'location':
if value.strand != self._sf.location.strand:
                    self._wa.annotations.flipStrand(
self._sf.qualifiers['ID'][0]
)
                self._wa.annotations.setBoundaries(
self._sf.qualifiers['ID'][0],
value.start,
value.end,
)
self._sf.__dict__[key] = value
else:
self._sf.__dict__[key] = value
def _tnType(feature):
if feature.type in ('gene', 'mRNA', 'exon', 'CDS', 'terminator', 'tRNA'):
return feature.type
else:
return 'exon'
def _yieldFeatData(features):
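    # Convert BioPython (sub)features into the nested dict structure the Apollo
    # annotation editor expects, recursing through sub_features for children.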
for f in features:
current = {
'location': {
'strand': f.strand,
'fmin': int(f.location.start),
'fmax': int(f.location.end),
},
'type': {
'name': _tnType(f),
'cv': {
'name': 'sequence',
}
},
}
if f.type in ('gene', 'mRNA'):
current['name'] = f.qualifiers.get('Name', [f.id])[0]
if hasattr(f, 'sub_features') and len(f.sub_features) > 0:
current['children'] = [x for x in _yieldFeatData(f.sub_features)]
yield current
def featuresToFeatureSchema(features):
compiled = []
for feature in features:
# if feature.type != 'gene':
# log.warn("Not able to handle %s features just yet...", feature.type)
# continue
for x in _yieldFeatData([feature]):
compiled.append(x)
return compiled
def accessible_organisms(user, orgs):
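    # Map each organism to its permissions, keeping only organisms the user can
    # READ, WRITE or ADMINISTRATE (site admins keep every listed organism), then
    # return (commonName, id, False) tuples sorted by common name.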
permissionMap = {
x['organism']: x['permissions']
for x in user.organismPermissions
if 'WRITE' in x['permissions'] or 'READ' in x['permissions'] or 'ADMINISTRATE' in x['permissions'] or user.role == 'ADMIN'
}
if 'error' in orgs:
raise Exception("Error received from Apollo server: \"%s\"" % orgs['error'])
return [
(org['commonName'], org['id'], False)
for org in sorted(orgs, key=lambda x: x['commonName'])
if org['commonName'] in permissionMap
]
def galaxy_list_groups(trans, *args, **kwargs):
email = trans.get_user().email
wa = WebApolloInstance(
os.environ['GALAXY_WEBAPOLLO_URL'],
os.environ['GALAXY_WEBAPOLLO_USER'],
os.environ['GALAXY_WEBAPOLLO_PASSWORD']
)
# Key for cached data
cacheKey = 'groups-' + email
    # We don't want to trust "if key in cache" because between checking and
    # fetching it might throw a KeyError.
if cacheKey not in cache:
# However if it ISN'T there, we know we're safe to fetch + put in
# there.
data = _galaxy_list_groups(wa, *args, **kwargs)
cache[cacheKey] = data
return data
try:
# The cache key may or may not be in the cache at this point, it
# /likely/ is. However we take no chances that it wasn't evicted between
# when we checked above and now, so we reference the object from the
# cache in preparation to return.
data = cache[cacheKey]
return data
except KeyError:
# If access fails due to eviction, we will fail over and can ensure that
# data is inserted.
data = _galaxy_list_groups(wa, *args, **kwargs)
cache[cacheKey] = data
return data
def _galaxy_list_groups(wa, *args, **kwargs):
# Fetch the groups.
group_data = []
for group in wa.groups.loadGroups():
# Reformat
group_data.append((group.name, group.name, False))
return group_data
def galaxy_list_orgs(trans, *args, **kwargs):
email = trans.get_user().email
wa = WebApolloInstance(
os.environ['GALAXY_WEBAPOLLO_URL'],
os.environ['GALAXY_WEBAPOLLO_USER'],
os.environ['GALAXY_WEBAPOLLO_PASSWORD']
)
try:
gx_user = wa.requireUser(email)
except UnknownUserException:
return []
# Key for cached data
cacheKey = 'orgs-' + email
if cacheKey not in cache:
data = _galaxy_list_orgs(wa, gx_user, *args, **kwargs)
cache[cacheKey] = data
return data
try:
data = cache[cacheKey]
return data
except KeyError:
data = _galaxy_list_orgs(wa, gx_user, *args, **kwargs)
cache[cacheKey] = data
return data
def _galaxy_list_orgs(wa, gx_user, *args, **kwargs):
# Fetch all organisms
all_orgs = wa.organisms.findAllOrganisms()
# Figure out which are accessible to the user
orgs = accessible_organisms(gx_user, all_orgs)
# Return org list
return orgs
def galaxy_list_users(trans, *args, **kwargs):
email = trans.get_user().email
wa = WebApolloInstance(
os.environ['GALAXY_WEBAPOLLO_URL'],
os.environ['GALAXY_WEBAPOLLO_USER'],
os.environ['GALAXY_WEBAPOLLO_PASSWORD']
)
# Assert that the email exists in apollo
try:
gx_user = wa.requireUser(email)
except UnknownUserException:
return []
# Key for cached data
cacheKey = 'users-' + email
    # We don't want to trust "if key in cache" because between checking and
    # fetching it might throw a KeyError.
if cacheKey not in cache:
# However if it ISN'T there, we know we're safe to fetch + put in
# there.
data = _galaxy_list_users(wa, gx_user, *args, **kwargs)
cache[cacheKey] = data
return data
try:
# The cache key may or may not be in the cache at this point, it
# /likely/ is. However we take no chances that it wasn't evicted between
# when we checked above and now, so we reference the object from the
# cache in preparation to return.
data = cache[cacheKey]
return data
except KeyError:
# If access fails due to eviction, we will fail over and can ensure that
# data is inserted.
data = _galaxy_list_users(wa, gx_user, *args, **kwargs)
cache[cacheKey] = data
return data
def _galaxy_list_users(wa, gx_user, *args, **kwargs):
# Fetch the users.
user_data = []
for user in wa.users.loadUsers():
# Reformat
user_data.append((user.username, user.username, False))
return user_data
# This is all for implementing the command line interface for testing.
class obj(object):
pass
class fakeTrans(object):
def __init__(self, username):
self.un = username
def get_user(self):
o = obj()
o.email = self.un
return o
def retry(closure, sleep=1, limit=5):
"""
Apollo has the bad habit of returning 500 errors if you call APIs
too quickly, largely because of the unholy things that happen in
grails.
To deal with the fact that we cannot send an addComments call too
quickly after a createFeature call, we have this function that will
keep calling a closure until it works.
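
    Illustrative use (any no-argument closure will do):
        retry(lambda: wa.annotations.addComments(feature_id, ['added by script']))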
"""
count = 0
while True:
count += 1
if count >= limit:
return False
try:
# Try calling it
closure()
# If successful, exit
return True
except Exception as e:
log.info(str(e)[0:100])
time.sleep(sleep)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Test access to apollo server')
parser.add_argument('email', help='Email of user to test')
    parser.add_argument('--action', choices=['org', 'group', 'users'], default='org', help='Which listing to fetch: organisms, groups, or users known to the requesting user.')
args = parser.parse_args()
trans = fakeTrans(args.email)
if args.action == 'org':
for f in galaxy_list_orgs(trans):
print(f)
elif args.action == 'group':
for f in galaxy_list_groups(trans):
print(f)
else:
for f in galaxy_list_users(trans):
print(f)
|
"""
Salidas
Doceavo-->int-->doc
Suma-->int-->suma
"""
doc=0
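# Arithmetic sequence a_n = 5n + 1: the 12th term is 61, and the sum of the
# first 12 terms is (6 + 61) * 12 / 2 = 402, computed below as (61 + 6) * 6.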
for a in range(1, 13):
    doc = 5 * a + 1
    if doc == 61:
        suma = (doc + 6) * 6
        print("a12= " + str(doc))
        print("suma= " + str(suma))
|
#!/usr/bin/env python
import os
import requests
# local configuration
remote_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'remote')
# URLs at which data can be found
csv_url_summary_stats = 'http://publishingstats.iatistandard.org/summary_stats.csv'
csv_url_humanitarian_stats = 'http://publishingstats.iatistandard.org/humanitarian.csv'
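# Both output files are opened in binary mode, so the downloaded text is
# explicitly encoded to UTF-8 bytes before being written.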
with open(os.path.join(remote_data_path, 'summary_stats.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_summary_stats)
if not response.ok:
print('There was a problem loading the Summary Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
with open(os.path.join(remote_data_path, 'humanitarian.csv'), 'wb') as f:
# load the data to write to the file
# TODO: Add error handling - URL loading
response = requests.get(csv_url_humanitarian_stats)
if not response.ok:
print('There was a problem loading the Humanitarian Statistics data')
# TODO: Add error handling - file writing
f.write(response.text.encode('utf-8'))
# TODO: Add mention of __main__ and main()
|
import io
import json
import re
from collections import defaultdict
from datetime import datetime
from braces.views import JsonRequestResponseMixin
from couchdbkit import ResourceNotFound
from corehq.apps.registration.forms import MobileWorkerAccountConfirmationForm
from corehq.apps.users.account_confirmation import send_account_confirmation_if_necessary
from corehq.util import get_document_or_404
from corehq.util.metrics import metrics_counter
from couchexport.models import Format
from couchexport.writers import Excel2007ExportWriter
from django.conf import settings
from django.contrib import messages
from django.contrib.humanize.templatetags.humanize import naturaltime
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseRedirect
from django.http.response import HttpResponseServerError, JsonResponse
from django.shortcuts import redirect, render
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_noop
from django.views.decorators.http import require_GET, require_POST
from django.views.generic import TemplateView, View
from django_prbac.exceptions import PermissionDenied
from django_prbac.utils import has_privilege
from djng.views.mixins import JSONResponseMixin, allow_remote_invocation
from memoized import memoized
from casexml.apps.phone.models import SyncLogSQL
from corehq import privileges
from corehq.apps.accounting.async_handlers import Select2BillingInfoHandler
from corehq.apps.accounting.decorators import requires_privilege_with_fallback
from corehq.apps.accounting.models import (
BillingAccount,
BillingAccountType,
EntryPoint,
)
from corehq.apps.accounting.utils import domain_has_privilege
from corehq.apps.custom_data_fields.edit_entity import CustomDataEditor
from corehq.apps.custom_data_fields.models import CUSTOM_DATA_FIELD_PREFIX
from corehq.apps.domain.decorators import domain_admin_required
from corehq.apps.domain.views.base import DomainViewMixin
from corehq.apps.es import FormES
from corehq.apps.groups.models import Group
from corehq.apps.hqwebapp.async_handler import AsyncHandlerMixin
from corehq.apps.hqwebapp.crispy import make_form_readonly
from corehq.apps.hqwebapp.decorators import use_multiselect
from corehq.apps.hqwebapp.utils import get_bulk_upload_form
from corehq.apps.locations.analytics import users_have_locations
from corehq.apps.locations.models import SQLLocation
from corehq.apps.locations.permissions import (
location_safe,
user_can_access_location_id,
)
from corehq.apps.ota.utils import demo_restore_date_created, turn_off_demo_mode
from corehq.apps.sms.models import SelfRegistrationInvitation
from corehq.apps.sms.verify import initiate_sms_verification_workflow
from corehq.apps.user_importer.importer import (
UserUploadError,
check_headers,
)
from corehq.apps.user_importer.tasks import import_users_and_groups
from corehq.apps.users.analytics import get_search_users_in_domain_es_query
from corehq.apps.users.dbaccessors.all_commcare_users import get_user_docs_by_username, user_exists
from corehq.apps.users.decorators import (
require_can_edit_commcare_users,
require_can_edit_or_view_commcare_users,
)
from corehq.apps.users.exceptions import InvalidMobileWorkerRequest
from corehq.apps.users.forms import (
CommCareAccountForm,
CommCareUserFilterForm,
CommCareUserFormSet,
CommtrackUserForm,
ConfirmExtraUserChargesForm,
MultipleSelectionForm,
NewMobileWorkerForm,
SelfRegistrationForm,
SetUserPasswordForm,
)
from corehq.apps.users.models import CommCareUser, CouchUser
from corehq.apps.users.tasks import (
bulk_download_users_async,
reset_demo_user_restore_task,
turn_on_demo_mode_task,
bulk_download_usernames_async,
)
from corehq.apps.users.util import (
can_add_extra_mobile_workers,
format_username,
raw_username,
)
from corehq.apps.users.views import (
BaseEditUserView,
BaseUserSettingsView,
get_domain_languages,
)
from corehq.const import GOOGLE_PLAY_STORE_COMMCARE_URL, USER_DATE_FORMAT
from corehq.toggles import FILTERED_BULK_USER_DOWNLOAD, TWO_STAGE_USER_PROVISIONING
from corehq.util.dates import iso_string_to_datetime
from corehq.util.workbook_json.excel import (
WorkbookJSONError,
WorksheetNotFound,
get_workbook,
)
from dimagi.utils.web import json_response
from soil import DownloadBase
from soil.exceptions import TaskFailedError
from soil.util import expose_cached_download, get_download_context
from .custom_data_fields import UserFieldsView
BULK_MOBILE_HELP_SITE = ("https://confluence.dimagi.com/display/commcarepublic"
"/Create+and+Manage+CommCare+Mobile+Workers#Createand"
"ManageCommCareMobileWorkers-B.UseBulkUploadtocreatem"
"ultipleusersatonce")
DEFAULT_USER_LIST_LIMIT = 10
BAD_MOBILE_USERNAME_REGEX = re.compile(r"[^A-Za-z0-9.+\-_]")  # '-' escaped so it is not read as a character range
def _can_edit_workers_location(web_user, mobile_worker):
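    # A web user may edit a mobile worker if they can access all locations in the
    # domain, or if they can access the worker's assigned location.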
if web_user.has_permission(mobile_worker.domain, 'access_all_locations'):
return True
loc_id = mobile_worker.location_id
if not loc_id:
return False
return user_can_access_location_id(mobile_worker.domain, web_user, loc_id)
@location_safe
class EditCommCareUserView(BaseEditUserView):
urlname = "edit_commcare_user"
page_title = ugettext_noop("Edit Mobile Worker")
@property
def page_name(self):
if self.request.is_view_only:
return _("Edit Mobile Worker (View Only)")
return self.page_title
@property
def template_name(self):
if self.editable_user.is_deleted():
return "users/deleted_account.html"
else:
return "users/edit_commcare_user.html"
@use_multiselect
@method_decorator(require_can_edit_or_view_commcare_users)
def dispatch(self, request, *args, **kwargs):
return super(EditCommCareUserView, self).dispatch(request, *args, **kwargs)
@property
def main_context(self):
context = super(EditCommCareUserView, self).main_context
context.update({
'edit_user_form_title': self.edit_user_form_title,
'strong_mobile_passwords': self.request.project.strong_mobile_passwords,
'implement_password_obfuscation': settings.OBFUSCATE_PASSWORD_FOR_NIC_COMPLIANCE,
'has_any_sync_logs': self.has_any_sync_logs,
})
return context
@property
def has_any_sync_logs(self):
return SyncLogSQL.objects.filter(user_id=self.editable_user_id).exists()
@property
@memoized
def editable_user(self):
try:
user = CouchUser.get_by_user_id(self.editable_user_id, self.domain)
except (ResourceNotFound, CouchUser.AccountTypeError, KeyError):
raise Http404()
if not user or not _can_edit_workers_location(self.couch_user, user):
raise Http404()
return user
@property
def edit_user_form_title(self):
return _("Information for %s") % self.editable_user.human_friendly_name
@property
def is_currently_logged_in_user(self):
return self.editable_user_id == self.couch_user._id
@property
def is_delete_allowed(self):
from corehq.apps.couch_sql_migration.progress import couch_sql_migration_in_progress
return not couch_sql_migration_in_progress(self.domain)
@property
@memoized
def reset_password_form(self):
return SetUserPasswordForm(self.request.project, self.editable_user_id, user="")
@property
@memoized
def groups(self):
if not self.editable_user:
return []
return Group.by_user_id(self.editable_user_id)
@property
@memoized
def all_groups(self):
# note: will slow things down if there are loads of groups. worth it?
# justification: ~every report already does this.
return Group.by_domain(self.domain)
@property
@memoized
def group_form(self):
form = MultipleSelectionForm(initial={
'selected_ids': [g._id for g in self.groups],
})
form.fields['selected_ids'].choices = [(g._id, g.name) for g in self.all_groups]
return form
@property
@memoized
def commtrack_form(self):
if self.request.method == "POST" and self.request.POST['form_type'] == "commtrack":
return CommtrackUserForm(self.request.POST, domain=self.domain)
# currently only support one location on the UI
linked_loc = self.editable_user.location
initial_id = linked_loc._id if linked_loc else None
program_id = self.editable_user.get_domain_membership(self.domain).program_id
assigned_locations = self.editable_user.assigned_location_ids
return CommtrackUserForm(
domain=self.domain,
initial={
'primary_location': initial_id,
'program_id': program_id,
'assigned_locations': assigned_locations}
)
@property
def page_context(self):
if self.request.is_view_only:
make_form_readonly(self.commtrack_form)
make_form_readonly(self.form_user_update.user_form)
make_form_readonly(self.form_user_update.custom_data.form)
context = {
'are_groups': bool(len(self.all_groups)),
'groups_url': reverse('all_groups', args=[self.domain]),
'group_form': self.group_form,
'reset_password_form': self.reset_password_form,
'is_currently_logged_in_user': self.is_currently_logged_in_user,
'is_delete_allowed': self.is_delete_allowed,
'data_fields_form': self.form_user_update.custom_data.form,
'can_use_inbound_sms': domain_has_privilege(self.domain, privileges.INBOUND_SMS),
'can_create_groups': (
self.request.couch_user.has_permission(self.domain, 'edit_groups') and
self.request.couch_user.has_permission(self.domain, 'access_all_locations')
),
'needs_to_downgrade_locations': (
users_have_locations(self.domain) and
not has_privilege(self.request, privileges.LOCATIONS)
),
'demo_restore_date': naturaltime(demo_restore_date_created(self.editable_user)),
'hide_password_feedback': settings.ENABLE_DRACONIAN_SECURITY_FEATURES,
'group_names': [g.name for g in self.groups],
}
if self.commtrack_form.errors:
messages.error(self.request, _(
"There were some errors while saving user's locations. Please check the 'Locations' tab"
))
if self.domain_object.commtrack_enabled or self.domain_object.uses_locations:
context.update({
'commtrack_enabled': self.domain_object.commtrack_enabled,
'uses_locations': self.domain_object.uses_locations,
'commtrack': {
'update_form': self.commtrack_form,
},
})
return context
@property
def user_role_choices(self):
return [('none', _('(none)'))] + self.editable_role_choices
@property
@memoized
def form_user_update(self):
if (self.request.method == "POST"
and self.request.POST['form_type'] == "update-user"
and not self.request.is_view_only):
data = self.request.POST
else:
data = None
form = CommCareUserFormSet(data=data, domain=self.domain,
editable_user=self.editable_user, request_user=self.request.couch_user)
form.user_form.load_language(language_choices=get_domain_languages(self.domain))
if self.can_change_user_roles or self.couch_user.can_view_roles():
form.user_form.load_roles(current_role=self.existing_role, role_choices=self.user_role_choices)
else:
del form.user_form.fields['role']
return form
@property
def parent_pages(self):
return [{
'title': MobileWorkerListView.page_title,
'url': reverse(MobileWorkerListView.urlname, args=[self.domain]),
}]
def post(self, request, *args, **kwargs):
if self.request.is_view_only:
messages.error(
request,
_("You do not have permission to update Mobile Workers.")
)
return super(EditCommCareUserView, self).get(request, *args, **kwargs)
if self.request.POST['form_type'] == "add-phonenumber":
phone_number = self.request.POST['phone_number']
phone_number = re.sub(r'\s', '', phone_number)
if re.match(r'\d+$', phone_number):
self.editable_user.add_phone_number(phone_number)
self.editable_user.save(spawn_task=True)
messages.success(request, _("Phone number added."))
else:
messages.error(request, _("Please enter digits only."))
return super(EditCommCareUserView, self).post(request, *args, **kwargs)
class ConfirmBillingAccountForExtraUsersView(BaseUserSettingsView, AsyncHandlerMixin):
urlname = 'extra_users_confirm_billing'
template_name = 'users/extra_users_confirm_billing.html'
page_title = ugettext_noop("Confirm Billing Information")
async_handlers = [
Select2BillingInfoHandler,
]
@property
@memoized
def account(self):
account = BillingAccount.get_or_create_account_by_domain(
self.domain,
created_by=self.couch_user.username,
account_type=BillingAccountType.USER_CREATED,
entry_point=EntryPoint.SELF_STARTED,
)[0]
return account
@property
@memoized
def billing_info_form(self):
if self.request.method == 'POST':
return ConfirmExtraUserChargesForm(
self.account, self.domain, self.request.couch_user.username, data=self.request.POST
)
return ConfirmExtraUserChargesForm(self.account, self.domain, self.request.couch_user.username)
@property
def page_context(self):
return {
'billing_info_form': self.billing_info_form,
}
@method_decorator(domain_admin_required)
def dispatch(self, request, *args, **kwargs):
if self.account.date_confirmed_extra_charges is not None:
return HttpResponseRedirect(reverse(MobileWorkerListView.urlname, args=[self.domain]))
return super(ConfirmBillingAccountForExtraUsersView, self).dispatch(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if self.billing_info_form.is_valid():
is_saved = self.billing_info_form.save()
if not is_saved:
messages.error(
request, _("It appears that there was an issue updating your contact information. "
"We've been notified of the issue. Please try submitting again, and if the problem "
"persists, please try in a few hours."))
else:
messages.success(
request, _("Billing contact information was successfully confirmed. "
"You may now add additional Mobile Workers.")
)
return HttpResponseRedirect(reverse(
MobileWorkerListView.urlname, args=[self.domain]
))
return self.get(request, *args, **kwargs)
@require_can_edit_commcare_users
@location_safe
@require_POST
def delete_commcare_user(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
if not _can_edit_workers_location(request.couch_user, user):
raise PermissionDenied()
if (user.user_location_id and
SQLLocation.objects.get_or_None(location_id=user.user_location_id,
user_id=user._id)):
messages.error(request, _("This is a location user. You must delete the "
"corresponding location before you can delete this user."))
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, user_id]))
user.retire()
messages.success(request, "User %s has been deleted. All their submissions and cases will be permanently deleted in the next few minutes" % user.username)
return HttpResponseRedirect(reverse(MobileWorkerListView.urlname, args=[domain]))
@require_can_edit_commcare_users
@location_safe
@require_POST
def force_user_412(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
if not _can_edit_workers_location(request.couch_user, user):
raise PermissionDenied()
metrics_counter('commcare.force_user_412.count', tags={'domain': domain})
SyncLogSQL.objects.filter(user_id=user_id).delete()
messages.success(
request,
"Mobile Worker {}'s device data will be hard refreshed the next time they sync."
.format(user.human_friendly_name)
)
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, user_id]) + '#user-permanent')
@require_can_edit_commcare_users
@require_POST
def restore_commcare_user(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
success, message = user.unretire()
if success:
messages.success(request, "User %s and all their submissions have been restored" % user.username)
else:
messages.error(request, message)
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, user_id]))
@require_can_edit_commcare_users
@require_POST
def toggle_demo_mode(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
demo_mode = request.POST.get('demo_mode', 'no')
    demo_mode = (demo_mode == 'yes')
edit_user_url = reverse(EditCommCareUserView.urlname, args=[domain, user_id])
# handle bad POST param
if user.is_demo_user == demo_mode:
warning = _("User is already in Demo mode!") if user.is_demo_user else _("User is not in Demo mode!")
messages.warning(request, warning)
return HttpResponseRedirect(edit_user_url)
if demo_mode:
download = DownloadBase()
res = turn_on_demo_mode_task.delay(user.get_id, domain)
download.set_task(res)
return HttpResponseRedirect(
reverse(
DemoRestoreStatusView.urlname,
args=[domain, download.download_id, user_id]
)
)
else:
from corehq.apps.app_manager.views.utils import unset_practice_mode_configured_apps, \
get_practice_mode_configured_apps
# if the user is being used as practice user on any apps, check/ask for confirmation
apps = get_practice_mode_configured_apps(domain)
        confirm_turn_off = request.POST.get('confirm_turn_off', 'no') == 'yes'
if apps and not confirm_turn_off:
return HttpResponseRedirect(reverse(ConfirmTurnOffDemoModeView.urlname, args=[domain, user_id]))
turn_off_demo_mode(user)
unset_practice_mode_configured_apps(domain, user.get_id)
messages.success(request, _("Successfully turned off demo mode!"))
return HttpResponseRedirect(edit_user_url)
class BaseManageCommCareUserView(BaseUserSettingsView):
@method_decorator(require_can_edit_commcare_users)
def dispatch(self, request, *args, **kwargs):
return super(BaseManageCommCareUserView, self).dispatch(request, *args, **kwargs)
@property
def parent_pages(self):
return [{
'title': MobileWorkerListView.page_title,
'url': reverse(MobileWorkerListView.urlname, args=[self.domain]),
}]
class ConfirmTurnOffDemoModeView(BaseManageCommCareUserView):
template_name = 'users/confirm_turn_off_demo_mode.html'
urlname = 'confirm_turn_off_demo_mode'
page_title = ugettext_noop("Turn off Demo mode")
@property
def page_context(self):
from corehq.apps.app_manager.views.utils import get_practice_mode_configured_apps
user_id = self.kwargs.pop('couch_user_id')
user = CommCareUser.get_by_user_id(user_id, self.domain)
practice_apps = get_practice_mode_configured_apps(self.domain, user_id)
return {
'commcare_user': user,
'practice_apps': practice_apps,
}
def page_url(self):
return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
class DemoRestoreStatusView(BaseManageCommCareUserView):
urlname = 'demo_restore_status'
page_title = ugettext_noop('Demo User Status')
def dispatch(self, request, *args, **kwargs):
return super(DemoRestoreStatusView, self).dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
context = super(DemoRestoreStatusView, self).main_context
context.update({
'domain': self.domain,
'download_id': kwargs['download_id'],
'poll_url': reverse('demo_restore_job_poll', args=[self.domain, kwargs['download_id']]),
'title': _("Demo User status"),
'progress_text': _("Getting latest restore data, please wait"),
'error_text': _("There was an unexpected error! Please try again or report an issue."),
'next_url': reverse(EditCommCareUserView.urlname, args=[self.domain, kwargs['user_id']]),
'next_url_text': _("Go back to Edit Mobile Worker"),
})
return render(request, 'hqwebapp/soil_status_full.html', context)
def page_url(self):
return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
@require_can_edit_commcare_users
def demo_restore_job_poll(request, domain, download_id, template="users/mobile/partials/demo_restore_status.html"):
try:
context = get_download_context(download_id)
except TaskFailedError:
return HttpResponseServerError()
context.update({
'on_complete_short': _('Done'),
'on_complete_long': _('User is now in Demo mode with latest restore!'),
})
return render(request, template, context)
@require_can_edit_commcare_users
@require_POST
def reset_demo_user_restore(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
if not user.is_demo_user:
warning = _("The user is not a demo user.")
messages.warning(request, warning)
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, user_id]))
download = DownloadBase()
res = reset_demo_user_restore_task.delay(user.get_id, domain)
download.set_task(res)
return HttpResponseRedirect(
reverse(
DemoRestoreStatusView.urlname,
args=[domain, download.download_id, user_id]
)
)
@require_can_edit_commcare_users
@require_POST
def update_user_groups(request, domain, couch_user_id):
form = MultipleSelectionForm(request.POST)
form.fields['selected_ids'].choices = [(id, 'throwaway') for id in Group.ids_by_domain(domain)]
if form.is_valid():
user = CommCareUser.get(couch_user_id)
assert user.doc_type == "CommCareUser"
assert user.domain == domain
user.set_groups(form.cleaned_data['selected_ids'])
messages.success(request, _("User groups updated!"))
else:
messages.error(request, _("Form not valid. A group may have been deleted while you were viewing this page"
"Please try again."))
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, couch_user_id]))
@require_can_edit_commcare_users
@require_POST
def update_user_data(request, domain, couch_user_id):
user_data = request.POST["user-data"]
if user_data:
updated_data = json.loads(user_data)
user = CommCareUser.get(couch_user_id)
assert user.doc_type == "CommCareUser"
assert user.domain == domain
user.user_data = updated_data
user.save(spawn_task=True)
messages.success(request, "User data updated!")
return HttpResponseRedirect(reverse(EditCommCareUserView.urlname, args=[domain, couch_user_id]))
@location_safe
class MobileWorkerListView(JSONResponseMixin, BaseUserSettingsView):
template_name = 'users/mobile_workers.html'
urlname = 'mobile_workers'
page_title = ugettext_noop("Mobile Workers")
@method_decorator(require_can_edit_or_view_commcare_users)
def dispatch(self, *args, **kwargs):
return super(MobileWorkerListView, self).dispatch(*args, **kwargs)
@property
@memoized
def can_access_all_locations(self):
return self.couch_user.has_permission(self.domain, 'access_all_locations')
@property
def can_bulk_edit_users(self):
return has_privilege(self.request, privileges.BULK_USER_MANAGEMENT) and not self.request.is_view_only
@property
def can_add_extra_users(self):
return can_add_extra_mobile_workers(self.request)
@property
@memoized
def new_mobile_worker_form(self):
if self.request.method == "POST":
return NewMobileWorkerForm(self.request.project, self.couch_user, self.request.POST)
return NewMobileWorkerForm(self.request.project, self.couch_user)
@property
@memoized
def custom_data(self):
return CustomDataEditor(
field_view=UserFieldsView,
domain=self.domain,
post_dict=self.request.POST if self.request.method == "POST" else None,
required_only=True,
ko_model="custom_fields",
)
@property
def page_context(self):
if FILTERED_BULK_USER_DOWNLOAD.enabled(self.domain):
bulk_download_url = reverse(FilteredUserDownload.urlname, args=[self.domain])
else:
bulk_download_url = reverse("download_commcare_users", args=[self.domain])
return {
'new_mobile_worker_form': self.new_mobile_worker_form,
'custom_fields_form': self.custom_data.form,
'custom_field_slugs': [f.slug for f in self.custom_data.fields],
'can_bulk_edit_users': self.can_bulk_edit_users,
'can_add_extra_users': self.can_add_extra_users,
'can_access_all_locations': self.can_access_all_locations,
'draconian_security': settings.ENABLE_DRACONIAN_SECURITY_FEATURES,
'pagination_limit_cookie_name': (
'hq.pagination.limit.mobile_workers_list.%s' % self.domain),
'can_edit_billing_info': self.request.couch_user.is_domain_admin(self.domain),
'strong_mobile_passwords': self.request.project.strong_mobile_passwords,
'implement_password_obfuscation': settings.OBFUSCATE_PASSWORD_FOR_NIC_COMPLIANCE,
'bulk_download_url': bulk_download_url,
}
@property
@memoized
def query(self):
return self.request.GET.get('query')
@allow_remote_invocation
def check_username(self, in_data):
try:
username = in_data['username'].strip()
except KeyError:
return HttpResponseBadRequest('You must specify a username')
if username == 'admin' or username == 'demo_user':
return {'error': _('Username {} is reserved.').format(username)}
try:
validate_email("{}@example.com".format(username))
if BAD_MOBILE_USERNAME_REGEX.search(username) is not None:
raise ValidationError("Username contained an invalid character")
except ValidationError:
if '..' in username:
return {
'error': _("Username may not contain consecutive . (period).")
}
if username.endswith('.'):
return {
'error': _("Username may not end with a . (period).")
}
return {
'error': _("Username may not contain special characters.")
}
full_username = format_username(username, self.domain)
exists = user_exists(full_username)
if exists.exists:
if exists.is_deleted:
result = {'warning': _('Username {} belonged to a user that was deleted.'
' Reusing it may have unexpected consequences.').format(username)}
else:
result = {'error': _('Username {} is already taken').format(username)}
else:
result = {'success': _('Username {} is available').format(username)}
return result
@allow_remote_invocation
def create_mobile_worker(self, in_data):
if self.request.is_view_only:
return {
'error': _("You do not have permission to create mobile workers.")
}
try:
self._ensure_proper_request(in_data)
form_data = self._construct_form_data(in_data)
except InvalidMobileWorkerRequest as e:
return {
'error': str(e)
}
self.request.POST = form_data
        if not (self.new_mobile_worker_form.is_valid() and self.custom_data.is_valid()):
            return {'error': _("Forms did not validate")}
couch_user = self._build_commcare_user()
if self.new_mobile_worker_form.cleaned_data['send_account_confirmation_email']:
send_account_confirmation_if_necessary(couch_user)
return {
'success': True,
'user_id': couch_user.userID,
}
def _build_commcare_user(self):
username = self.new_mobile_worker_form.cleaned_data['username']
password = self.new_mobile_worker_form.cleaned_data['new_password']
first_name = self.new_mobile_worker_form.cleaned_data['first_name']
email = self.new_mobile_worker_form.cleaned_data['email']
last_name = self.new_mobile_worker_form.cleaned_data['last_name']
location_id = self.new_mobile_worker_form.cleaned_data['location_id']
is_account_confirmed = not self.new_mobile_worker_form.cleaned_data['force_account_confirmation']
return CommCareUser.create(
self.domain,
username,
password,
email=email,
device_id="Generated from HQ",
first_name=first_name,
last_name=last_name,
user_data=self.custom_data.get_data_to_save(),
is_account_confirmed=is_account_confirmed,
location=SQLLocation.objects.get(location_id=location_id) if location_id else None,
)
def _ensure_proper_request(self, in_data):
if not self.can_add_extra_users:
raise InvalidMobileWorkerRequest(_("No Permission."))
if 'user' not in in_data:
raise InvalidMobileWorkerRequest(_("Please provide mobile worker data."))
return None
def _construct_form_data(self, in_data):
try:
user_data = in_data['user']
form_data = {
'username': user_data.get('username'),
'new_password': user_data.get('password'),
'first_name': user_data.get('first_name'),
'last_name': user_data.get('last_name'),
'location_id': user_data.get('location_id'),
'email': user_data.get('email'),
'force_account_confirmation': user_data.get('force_account_confirmation'),
'send_account_confirmation_email': user_data.get('send_account_confirmation_email'),
'domain': self.domain,
}
for k, v in user_data.get('custom_fields', {}).items():
form_data["{}-{}".format(CUSTOM_DATA_FIELD_PREFIX, k)] = v
return form_data
except Exception as e:
raise InvalidMobileWorkerRequest(_("Check your request: {}".format(e)))
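    # Illustrative shape of the 'in_data' payload handled above (keys mirror the lookups in
    # _construct_form_data; all values are placeholders):
    # {
    #     "user": {
    #         "username": "worker1", "password": "...",
    #         "first_name": "", "last_name": "",
    #         "location_id": None, "email": "",
    #         "force_account_confirmation": False,
    #         "send_account_confirmation_email": False,
    #         "custom_fields": {"<slug>": "<value>"}
    #     }
    # }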
@require_can_edit_commcare_users
@require_POST
@location_safe
def activate_commcare_user(request, domain, user_id):
return _modify_user_status(request, domain, user_id, True)
@require_can_edit_commcare_users
@require_POST
@location_safe
def deactivate_commcare_user(request, domain, user_id):
return _modify_user_status(request, domain, user_id, False)
def _modify_user_status(request, domain, user_id, is_active):
user = CommCareUser.get_by_user_id(user_id, domain)
if (not _can_edit_workers_location(request.couch_user, user)
or (is_active and not can_add_extra_mobile_workers(request))):
return json_response({
'error': _("No Permission."),
})
if not is_active and user.user_location_id:
return json_response({
'error': _("This is a location user, archive or delete the "
"corresponding location to deactivate it."),
})
user.is_active = is_active
user.save(spawn_task=True)
return json_response({
'success': True,
})
@require_can_edit_commcare_users
@require_POST
@location_safe
def send_confirmation_email(request, domain, user_id):
user = CommCareUser.get_by_user_id(user_id, domain)
send_account_confirmation_if_necessary(user)
return JsonResponse(data={'success': True})
@require_can_edit_or_view_commcare_users
@require_GET
@location_safe
def paginate_mobile_workers(request, domain):
limit = int(request.GET.get('limit', 10))
page = int(request.GET.get('page', 1))
query = request.GET.get('query')
deactivated_only = json.loads(request.GET.get('showDeactivatedUsers', "false"))
def _user_query(search_string, page, limit):
user_es = get_search_users_in_domain_es_query(
domain=domain, search_string=search_string,
offset=page * limit, limit=limit)
if not request.couch_user.has_permission(domain, 'access_all_locations'):
loc_ids = (SQLLocation.objects.accessible_to_user(domain, request.couch_user)
.location_ids())
user_es = user_es.location(list(loc_ids))
return user_es.mobile_users()
# backend pages start at 0
users_query = _user_query(query, page - 1, limit)
# run with a blank query to fetch total records with same scope as in search
if deactivated_only:
users_query = users_query.show_only_inactive()
users_data = users_query.source([
'_id',
'first_name',
'last_name',
'base_username',
'created_on',
'is_active',
'is_account_confirmed',
]).run()
users = users_data.hits
def _status_string(user_data):
if user_data['is_active']:
return _('Active')
elif user_data['is_account_confirmed']:
return _('Deactivated')
else:
return _('Pending Confirmation')
for user in users:
date_registered = user.pop('created_on', '')
if date_registered:
date_registered = iso_string_to_datetime(date_registered).strftime(USER_DATE_FORMAT)
# make sure these are always set and default to true
user['is_active'] = user.get('is_active', True)
user['is_account_confirmed'] = user.get('is_account_confirmed', True)
user.update({
'username': user.pop('base_username', ''),
'user_id': user.pop('_id'),
'date_registered': date_registered,
'status': _status_string(user),
})
return json_response({
'users': users,
'total': users_data.total,
})
class CreateCommCareUserModal(JsonRequestResponseMixin, DomainViewMixin, View):
template_name = "users/new_mobile_worker_modal.html"
urlname = 'new_mobile_worker_modal'
@method_decorator(require_can_edit_commcare_users)
def dispatch(self, request, *args, **kwargs):
if not can_add_extra_mobile_workers(request):
raise PermissionDenied()
return super(CreateCommCareUserModal, self).dispatch(request, *args, **kwargs)
def render_form(self, status):
return self.render_json_response({
"status": status,
"form_html": render_to_string(self.template_name, {
'form': self.new_commcare_user_form,
'data_fields_form': self.custom_data.form,
}, request=self.request)
})
def get(self, request, *args, **kwargs):
return self.render_form("success")
@property
@memoized
def custom_data(self):
return CustomDataEditor(
field_view=UserFieldsView,
domain=self.domain,
post_dict=self.request.POST if self.request.method == "POST" else None,
)
@property
@memoized
def new_commcare_user_form(self):
if self.request.method == "POST":
data = self.request.POST.dict()
form = CommCareAccountForm(data, domain=self.domain)
else:
form = CommCareAccountForm(domain=self.domain)
return form
@method_decorator(requires_privilege_with_fallback(privileges.OUTBOUND_SMS))
def post(self, request, *args, **kwargs):
if self.new_commcare_user_form.is_valid() and self.custom_data.is_valid():
username = self.new_commcare_user_form.cleaned_data['username']
password = self.new_commcare_user_form.cleaned_data['password_1']
phone_number = self.new_commcare_user_form.cleaned_data['phone_number']
user = CommCareUser.create(
self.domain,
username,
password,
phone_number=phone_number,
device_id="Generated from HQ",
user_data=self.custom_data.get_data_to_save(),
)
if 'location_id' in request.GET:
try:
loc = SQLLocation.objects.get(domain=self.domain,
location_id=request.GET['location_id'])
except SQLLocation.DoesNotExist:
raise Http404()
user.set_location(loc)
if phone_number:
initiate_sms_verification_workflow(user, phone_number)
user_json = {'user_id': user._id, 'text': user.username_in_report}
return self.render_json_response({"status": "success",
"user": user_json})
return self.render_form("failure")
class UploadCommCareUsers(BaseManageCommCareUserView):
template_name = 'hqwebapp/bulk_upload.html'
urlname = 'upload_commcare_users'
page_title = ugettext_noop("Bulk Upload Mobile Workers")
@method_decorator(requires_privilege_with_fallback(privileges.BULK_USER_MANAGEMENT))
def dispatch(self, request, *args, **kwargs):
return super(UploadCommCareUsers, self).dispatch(request, *args, **kwargs)
@property
def page_context(self):
request_params = self.request.GET if self.request.method == 'GET' else self.request.POST
context = {
'bulk_upload': {
"help_site": {
"address": BULK_MOBILE_HELP_SITE,
"name": _("CommCare Help Site"),
},
"download_url": reverse(
"download_commcare_users", args=(self.domain,)),
"adjective": _("mobile worker"),
"plural_noun": _("mobile workers"),
},
'show_secret_settings': request_params.get("secret", False),
}
context.update({
'bulk_upload_form': get_bulk_upload_form(context),
})
return context
def post(self, request, *args, **kwargs):
"""View's dispatch method automatically calls this"""
try:
self.workbook = get_workbook(request.FILES.get('bulk_upload_file'))
except WorkbookJSONError as e:
messages.error(request, str(e))
return self.get(request, *args, **kwargs)
try:
self.user_specs = self.workbook.get_worksheet(title='users')
except WorksheetNotFound:
try:
self.user_specs = self.workbook.get_worksheet()
except WorksheetNotFound:
return HttpResponseBadRequest("Workbook has no worksheets")
try:
self.group_specs = self.workbook.get_worksheet(title='groups')
except WorksheetNotFound:
self.group_specs = []
try:
check_headers(self.user_specs)
except UserUploadError as e:
messages.error(request, _(str(e)))
return HttpResponseRedirect(reverse(UploadCommCareUsers.urlname, args=[self.domain]))
task_ref = expose_cached_download(payload=None, expiry=1 * 60 * 60, file_extension=None)
task = import_users_and_groups.delay(
self.domain,
list(self.user_specs),
list(self.group_specs),
)
task_ref.set_task(task)
return HttpResponseRedirect(
reverse(
UserUploadStatusView.urlname,
args=[self.domain, task_ref.download_id]
)
)
class UserUploadStatusView(BaseManageCommCareUserView):
urlname = 'user_upload_status'
page_title = ugettext_noop('Mobile Worker Upload Status')
def get(self, request, *args, **kwargs):
context = super(UserUploadStatusView, self).main_context
context.update({
'domain': self.domain,
'download_id': kwargs['download_id'],
'poll_url': reverse('user_upload_job_poll', args=[self.domain, kwargs['download_id']]),
'title': _("Mobile Worker Upload Status"),
'progress_text': _("Importing your data. This may take some time..."),
'error_text': _("Problem importing data! Please try again or report an issue."),
'next_url': reverse(MobileWorkerListView.urlname, args=[self.domain]),
'next_url_text': _("Return to manage mobile workers"),
})
return render(request, 'hqwebapp/soil_status_full.html', context)
def page_url(self):
return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
@require_can_edit_commcare_users
def user_upload_job_poll(request, domain, download_id, template="users/mobile/partials/user_upload_status.html"):
try:
context = get_download_context(download_id)
except TaskFailedError:
return HttpResponseServerError()
context.update({
'on_complete_short': _('Bulk upload complete.'),
'on_complete_long': _('Mobile Worker upload has finished'),
})
class _BulkUploadResponseWrapper(object):
def __init__(self, context):
results = context.get('result') or defaultdict(lambda: [])
self.response_rows = results['rows']
self.response_errors = results['errors']
self.problem_rows = [r for r in self.response_rows if r['flag'] not in ('updated', 'created')]
def success_count(self):
return len(self.response_rows) - len(self.problem_rows)
def has_errors(self):
return bool(self.response_errors or self.problem_rows)
def errors(self):
errors = []
for row in self.problem_rows:
if row['flag'] == 'missing-data':
errors.append(_('A row with no username was skipped'))
else:
errors.append('{username}: {flag}'.format(**row))
errors.extend(self.response_errors)
return errors
context['result'] = _BulkUploadResponseWrapper(context)
return render(request, template, context)
@require_can_edit_commcare_users
def user_download_job_poll(request, domain, download_id, template="hqwebapp/partials/shared_download_status.html"):
try:
context = get_download_context(download_id, 'Preparing download')
context.update({'link_text': _('Download Users')})
except TaskFailedError as e:
return HttpResponseServerError(e.errors)
return render(request, template, context)
class DownloadUsersStatusView(BaseManageCommCareUserView):
urlname = 'download_users_status'
page_title = ugettext_noop('Download Users Status')
def get(self, request, *args, **kwargs):
context = super(DownloadUsersStatusView, self).main_context
context.update({
'domain': self.domain,
'download_id': kwargs['download_id'],
'poll_url': reverse('user_download_job_poll', args=[self.domain, kwargs['download_id']]),
'title': _("Download Users Status"),
'progress_text': _("Preparing user download."),
'error_text': _("There was an unexpected error! Please try again or report an issue."),
'next_url': reverse(MobileWorkerListView.urlname, args=[self.domain]),
'next_url_text': _("Go back to Mobile Workers"),
})
return render(request, 'hqwebapp/soil_status_full.html', context)
def page_url(self):
return reverse(self.urlname, args=self.args, kwargs=self.kwargs)
class FilteredUserDownload(BaseManageCommCareUserView):
urlname = 'filter_and_download_commcare_users'
page_title = ugettext_noop('Filter and Download')
@method_decorator(require_can_edit_commcare_users)
def get(self, request, domain, *args, **kwargs):
form = CommCareUserFilterForm(request.GET, domain=domain)
context = self.main_context
context.update({'form': form, 'count_users_url': reverse('count_users', args=[domain])})
return render(
request,
"users/filter_and_download.html",
context
)
class UsernameUploadMixin(object):
"""
Contains helper functions for working with a file that consists of a single column of usernames.
"""
def _get_usernames(self, request):
"""
Get username list from Excel supplied in request.FILES.
Adds any errors to request.messages.
"""
sheet = self._get_sheet(request)
if not sheet:
return None
try:
usernames = [format_username(row['username'], request.domain) for row in sheet]
except KeyError:
messages.error(request, _("No users found. Please check your file contains a 'username' column."))
return None
if not len(usernames):
messages.error(request, _("No users found. Please check file is not empty."))
return None
return usernames
def _get_sheet(self, request):
try:
workbook = get_workbook(request.FILES.get('bulk_upload_file'))
except WorkbookJSONError as e:
messages.error(request, str(e))
return None
try:
sheet = workbook.get_worksheet()
except WorksheetNotFound:
messages.error(request, _("Workbook has no worksheets"))
return None
return sheet
class DeleteCommCareUsers(BaseManageCommCareUserView, UsernameUploadMixin):
urlname = 'delete_commcare_users'
page_title = ugettext_noop('Bulk Delete')
template_name = 'users/bulk_delete.html'
@property
def page_context(self):
context = self.main_context
context.update({
'bulk_upload_form': get_bulk_upload_form(),
})
return context
def post(self, request, *args, **kwargs):
usernames = self._get_usernames(request)
if not usernames:
return self.get(request, *args, **kwargs)
user_docs_by_id = {doc['_id']: doc for doc in get_user_docs_by_username(usernames)}
user_ids_with_forms = self._get_user_ids_with_forms(request, user_docs_by_id)
usernames_not_found = self._get_usernames_not_found(request, user_docs_by_id, usernames)
if user_ids_with_forms or usernames_not_found:
messages.error(request, _("""
No users deleted. Please address the above issue(s) and re-upload your updated file.
"""))
else:
self._delete_users(request, user_docs_by_id, user_ids_with_forms)
return self.get(request, *args, **kwargs)
def _get_user_ids_with_forms(self, request, user_docs_by_id):
"""
Find users who have ever submitted a form, and add to request.messages if so.
"""
user_ids_with_forms = (
FormES()
.domain(request.domain)
.user_id(list(user_docs_by_id))
.terms_aggregation('form.meta.userID', 'user_id')
).run().aggregations.user_id.keys
if user_ids_with_forms:
message = _("""
The following users have form submissions and must be deleted individually: {}.
""").format(", ".join([raw_username(user_docs_by_id[user_id]['username'])
for user_id in user_ids_with_forms]))
messages.error(request, message)
return user_ids_with_forms
def _get_usernames_not_found(self, request, user_docs_by_id, usernames):
"""
The only side effect of this is to possibly add to request.messages.
"""
usernames_not_found = set(usernames) - {doc['username'] for doc in user_docs_by_id.values()}
if usernames_not_found:
message = _("The following users were not found: {}.").format(
", ".join(map(raw_username, usernames_not_found)))
messages.error(request, message)
return usernames_not_found
def _delete_users(self, request, user_docs_by_id, user_ids_with_forms):
deleted_count = 0
for user_id, doc in user_docs_by_id.items():
if user_id not in user_ids_with_forms:
CommCareUser.wrap(doc).delete()
deleted_count += 1
if deleted_count:
messages.success(request, f"{deleted_count} user(s) deleted.")
class CommCareUsersLookup(BaseManageCommCareUserView, UsernameUploadMixin):
urlname = 'commcare_users_lookup'
page_title = ugettext_noop('Mobile Workers Bulk Lookup')
template_name = 'users/bulk_lookup.html'
@property
def page_context(self):
context = self.main_context
context.update({
'bulk_upload_form': get_bulk_upload_form(),
})
return context
def post(self, request, *args, **kwargs):
usernames = self._get_usernames(request)
if not usernames:
return self.get(request, *args, **kwargs)
docs_by_username = {doc['username']: doc for doc in get_user_docs_by_username(usernames)}
rows = []
for username in usernames:
row = [raw_username(username)]
if username in docs_by_username:
row.extend([_("yes"), docs_by_username[username].get("is_active")])
else:
row.extend([_("no"), ""])
rows.append(row)
response = HttpResponse(content_type=Format.from_format('xlsx').mimetype)
response['Content-Disposition'] = f'attachment; filename="{self.domain} users.xlsx"'
response.write(self._excel_data(rows))
return response
def _excel_data(self, rows):
outfile = io.BytesIO()
tab_name = "users"
header_table = [(tab_name, [(_("username"), _("exists"), _("is_active"))])]
writer = Excel2007ExportWriter()
writer.open(header_table=header_table, file=outfile)
writer.write([(tab_name, rows)])
writer.close()
return outfile.getvalue()
@require_can_edit_commcare_users
def count_users(request, domain):
from corehq.apps.users.dbaccessors.all_commcare_users import get_commcare_users_by_filters
form = CommCareUserFilterForm(request.GET, domain=domain)
user_filters = {}
if form.is_valid():
user_filters = form.cleaned_data
else:
return HttpResponseBadRequest("Invalid Request")
return json_response({
'count': get_commcare_users_by_filters(domain, user_filters, count_only=True)
})
@require_can_edit_commcare_users
def download_commcare_users(request, domain):
form = CommCareUserFilterForm(request.GET, domain=domain)
user_filters = {}
if form.is_valid():
user_filters = form.cleaned_data
else:
return HttpResponseRedirect(
reverse(FilteredUserDownload.urlname, args=[domain]) + "?" + request.GET.urlencode())
download = DownloadBase()
if form.cleaned_data['columns'] == CommCareUserFilterForm.USERNAMES_COLUMN_OPTION:
res = bulk_download_usernames_async.delay(domain, download.download_id, user_filters)
else:
res = bulk_download_users_async.delay(domain, download.download_id, user_filters)
download.set_task(res)
return redirect(DownloadUsersStatusView.urlname, domain, download.download_id)
class CommCareUserSelfRegistrationView(TemplateView, DomainViewMixin):
template_name = "users/mobile/commcare_user_self_register.html"
urlname = "commcare_user_self_register"
strict_domain_fetching = True
@property
@memoized
def token(self):
return self.kwargs.get('token')
@property
@memoized
def invitation(self):
return SelfRegistrationInvitation.by_token(self.token)
@property
@memoized
def form(self):
if self.request.method == 'POST':
return SelfRegistrationForm(self.request.POST, domain=self.domain,
require_email=self.invitation.require_email)
else:
return SelfRegistrationForm(domain=self.domain,
require_email=self.invitation.require_email)
def get_context_data(self, **kwargs):
context = super(CommCareUserSelfRegistrationView, self).get_context_data(**kwargs)
context.update({
'hr_name': self.domain_object.display_name(),
'form': self.form,
'invitation': self.invitation,
'can_add_extra_mobile_workers': can_add_extra_mobile_workers(self.request),
'google_play_store_url': GOOGLE_PLAY_STORE_COMMCARE_URL,
})
return context
def validate_request(self):
if (
not self.invitation or
self.invitation.domain != self.domain or
not self.domain_object.sms_mobile_worker_registration_enabled
):
raise Http404()
def get(self, request, *args, **kwargs):
self.validate_request()
return super(CommCareUserSelfRegistrationView, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.validate_request()
if (
not self.invitation.expired and
not self.invitation.already_registered and
self.form.is_valid()
):
email = self.form.cleaned_data.get('email')
if email:
email = email.lower()
user = CommCareUser.create(
self.domain,
self.form.cleaned_data.get('username'),
self.form.cleaned_data.get('password'),
email=email,
phone_number=self.invitation.phone_number,
device_id='Generated from HQ',
user_data=self.invitation.custom_user_data,
)
# Since the user is being created by following the link and token
# we sent to their phone by SMS, we can verify their phone number
entry = user.get_or_create_phone_entry(self.invitation.phone_number)
entry.set_two_way()
entry.set_verified()
entry.save()
self.invitation.registered_date = datetime.utcnow()
self.invitation.save()
return self.get(request, *args, **kwargs)
@method_decorator(TWO_STAGE_USER_PROVISIONING.required_decorator(), name='dispatch')
class CommCareUserConfirmAccountView(TemplateView, DomainViewMixin):
template_name = "users/commcare_user_confirm_account.html"
urlname = "commcare_user_confirm_account"
strict_domain_fetching = True
@property
@memoized
def user_id(self):
return self.kwargs.get('user_id')
@property
@memoized
def user(self):
return get_document_or_404(CommCareUser, self.domain, self.user_id)
@property
@memoized
def form(self):
if self.request.method == 'POST':
return MobileWorkerAccountConfirmationForm(self.request.POST)
else:
return MobileWorkerAccountConfirmationForm(initial={
'username': self.user.raw_username,
'full_name': self.user.full_name,
'email': self.user.email,
})
def get_context_data(self, **kwargs):
context = super(CommCareUserConfirmAccountView, self).get_context_data(**kwargs)
context.update({
'domain_name': self.domain_object.display_name(),
'user': self.user,
'form': self.form,
})
return context
def post(self, request, *args, **kwargs):
form = self.form
if form.is_valid():
user = self.user
user.email = form.cleaned_data['email']
full_name = form.cleaned_data['full_name']
user.first_name = full_name[0]
user.last_name = full_name[1]
user.confirm_account(password=self.form.cleaned_data['password'])
messages.success(request, _(
f'You have successfully confirmed the {user.raw_username} account. '
'You can now login'
))
return HttpResponseRedirect('{}?username={}'.format(
reverse('domain_login', args=[self.domain]),
user.raw_username,
))
# todo: process form data and activate the account
return self.get(request, *args, **kwargs)
|
"""The purpose of this script is to convert the cree "words by frequency"
txt file into a simple word list for the visualization script.
"""
import re
INDIR = "/home/wlane/projects/morph-completion-visualization/data/words/cree_words_by_freq.txt"
OUTDIR = "/home/wlane/projects/morph-completion-visualization/data/words/cree_wordlist.txt"
def main():
    with open(INDIR, "r") as f:
        lines = f.readlines()
    lines = [x.split()[1] for x in lines]
    with open(OUTDIR, "w") as g:
        for word in lines:
            # word = re.sub("-", "", word)
            g.write(word.lower() + "\n")
if __name__ == "__main__":
main() |
""" Interface class for the LISP-based mReasoner implementation.
"""
import copy
import logging
import os
import platform
import queue
import subprocess
import threading
import time
import urllib.request
import zipfile
import numpy as np
import scipy.optimize as so
FASL_ENDINGS = {
'Darwin': 'dx64fsl',
'Windows': 'wx64fsl',
'Linux': 'lx64fsl'
}
def source_path(mreas_path='.mreasoner'):
""" Determines the source path of mReasoner if existent. Downloads a copy if necessary.
Parameters
----------
mreas_path : str
Target path for the mReasoner source copy.
Returns
-------
str
Path to the directory containing the mReasoner sources.
"""
if not os.path.exists(mreas_path):
# Create the mreasoner directory
os.mkdir(mreas_path)
# Download the mreasoner source
link = 'https://nc.informatik.uni-freiburg.de/index.php/s/JyMd3g36wXdgwy3/download'
dl_target = mreas_path + os.sep + 'mReasoner.zip'
urllib.request.urlretrieve(link, dl_target)
# Unzip content
with zipfile.ZipFile(dl_target, 'r') as zip_ref:
zip_ref.extractall(mreas_path)
# Look for mReasoner directory
for name in os.listdir(mreas_path):
path = mreas_path + os.sep + name
if not os.path.isdir(path) or name.startswith('_'):
continue
mreas_path = path + os.sep + 'src'
return mreas_path
class MReasoner():
""" LISP mReasoner wrapper. Executes a Clozure Common LISP subprocess to run an unmodified
version of mReasoner. Provides basic interfacing mechanisms for inference generation and
parameter fitting.
"""
def __init__(self, ccl_path, mreasoner_dir):
""" Constructs the mReasoner instance by launching the LISP subprocess.
Parameters
----------
ccl_path : str
Path to the Clozure Common LISP executable.
mreasoner_dir : str
Path to the mReasoner source code directory.
"""
# Initialize logger instance
self.logger = logging.getLogger(__name__)
self.param_bounds = [[0.0, 1.0], [0.1, 8.0], [0.0, 1.0], [0.0, 1.0]]
self.proc = subprocess.Popen(
[ccl_path],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
# Add debug stuff
self.received_messages = []
self.executed_commands = []
# Instantiate the result queue
self.resp_queue = queue.Queue()
# Register the readers
def stdout_reader(proc):
# Setup thread logger
logger = logging.getLogger(__name__ + '-reader')
logger.debug('Starting reader...')
while True:
# Read text from mReasoner output
text = proc.stdout.readline().decode('ascii').strip()
logger.debug('mReasoner:%s', text)
self.received_messages.append(text)
# Ignore comments and query results
if text.startswith(';'):
continue
# Catch the termination signal
if 'TERMINATE' in text:
logger.debug('termination handling initiated...')
break
if ('While executing:' in text) or ('Error:' in text):
self.resp_queue.put('HALT')
# Handle query results
if text.startswith('?'):
query_result = text[2:]
# Handle different query results
is_float = False
try:
float(query_result)
is_float = True
except ValueError:
pass
# Ignore float results
if is_float:
continue
if query_result[0] == '(' and query_result[-1] == ')':
logger.debug('Queue-Quant:%s', query_result)
self.resp_queue.put(query_result)
elif query_result[0] == '"' and query_result[-1] == '"':
query_result = query_result.replace('"', '')
logger.debug('Queue-Concl:%s', query_result)
self.resp_queue.put(query_result)
elif query_result == 'NIL':
logger.debug('Queue-NIL:%s', query_result)
self.resp_queue.put(query_result)
else:
logger.debug('Queue-INVAL:%s', query_result)
logger.debug('terminating...')
self.readerstdout = threading.Thread(target=stdout_reader, args=(self.proc,), daemon=True)
self.readerstdout.start()
# Load mReasoner in CCL environment
mreasoner_file = mreasoner_dir + os.sep + "+mReasoner.lisp"
fasl_path = mreasoner_dir + os.sep + "+mReasoner.{}".format(FASL_ENDINGS[platform.system()])
# Compile mreasoner if not done before
if not os.path.exists(fasl_path):
print('Compiling...')
mreasoner_file = mreasoner_file.replace('\\', '/')
self._send('(compile-file "{}")'.format(mreasoner_file))
else:
            print('Not compiling, FASL already exists')
fasl_path = fasl_path.replace('\\', '/')
self._send('(load "{}")'.format(fasl_path))
self._send('(defvar resp 0)')
# Initialize parameter values
self.default_params = {
'epsilon': 0.0,
'lambda': 4.0,
'omega': 1.0,
'sigma': 0.0
}
self.params = copy.deepcopy(self.default_params)
self.set_param('epsilon', self.params['epsilon'])
self.set_param('lambda', self.params['lambda'])
self.set_param('omega', self.params['omega'])
self.set_param('sigma', self.params['sigma'])
def _send(self, cmd):
""" Send a command to the Clozure Common LISP subprocess.
Parameters
----------
cmd : str
Command to send.
"""
# Normalize the command
        cmd = cmd.strip()
self.executed_commands.append(cmd)
self.logger.debug('Send:%s', cmd)
self.proc.stdin.write('{}\n'.format(cmd).encode('ascii'))
self.proc.stdin.flush()
@staticmethod
def construct_premises(syllog):
""" Constructs mReasoner representation of the premises for a given syllogism identifier.
Parameters
----------
syllog : str
Syllogistic problem identifier (e.g., 'AA1', 'OE3').
Returns
-------
(p1, p2) : str
Tuple of the mReasoner representations of both premises.
"""
template_quant = {
'A': 'All {} are {}',
'I': 'Some {} are {}',
'E': 'No {} are {}',
'O': 'Some {} are not {}'
}
template_fig = {
'1': [['A', 'B'], ['B', 'C']],
'2': [['B', 'A'], ['C', 'B']],
'3': [['A', 'B'], ['C', 'B']],
'4': [['B', 'A'], ['B', 'C']]
}
prem1 = template_quant[syllog[0]].format(*template_fig[syllog[-1]][0])
prem2 = template_quant[syllog[1]].format(*template_fig[syllog[-1]][1])
return prem1, prem2
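    # Informal examples derived from the templates above (quantifier letter + figure digit):
    #   construct_premises('AA1') -> ('All A are B', 'All B are C')
    #   construct_premises('OE3') -> ('Some A are not B', 'No C are B')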
def query(self, syllog):
""" Queries mReasoner for a prediction for a given syllogistic problem.
Parameters
----------
syllog : str
Syllogistic problem identifier (e.g., 'AA1', 'EO3') to generate prediction for.
Returns
-------
str
Generated syllogistic response identifier (e.g., 'Aac', 'Ica')
"""
prem1, prem2 = self.construct_premises(syllog)
# Send the conclusion generation query
cmd = "(what-follows? (list (parse '({})) (parse '({}))))".format(prem1, prem2)
cmd = '(setf resp {})'.format(cmd)
self.logger.debug('Query:%s', cmd)
self._send(cmd)
# Check query result
query_result = self.resp_queue.get()
self.logger.debug('Query result: "%s"', query_result)
conclusion = None
if 'Q-INTENSION' in query_result:
# Send the result interpretation query
# cmd = '(abbreviate (nth (random (length resp)) resp))'
cmd = "(map 'list (lambda (x) (abbreviate x)) resp)"
self.logger.debug('Query:%s', cmd)
self._send(cmd)
# Retrieve queue output
conclusion = self.resp_queue.get()
elif ('NULL-INTENSION' in query_result) or (query_result == 'NIL'):
self.logger.debug('NVC-RESULT:%s', query_result)
if 'Q-INTENSION' in query_result:
assert False
conclusion = 'NVC'
else:
self.logger.warning('QUERY-RES-INVALID:%s', query_result)
assert False
conclusion_candidates = conclusion.replace('"', '').replace('(', '').replace(')', '').split()
self.logger.debug('%s->%s', syllog, conclusion_candidates)
return conclusion_candidates
def terminate(self):
""" Terminate mReasoner and its parent instance of Clozure Common LISP.
"""
# Shutdown the threads
self._send('(prin1 "TERMINATE")')
self.logger.debug('Waiting for stdout...')
self.readerstdout.join()
# Terminate Clozure
self._send('(quit)')
def set_param(self, param, value):
""" Set mReasoner parameter to a specified value.
        Parameters
        ----------
param : str
Parameter identifier. Can be one of ['epsilon', 'lambda', 'omega', 'sigma'].
value : float
Parameter value.
Raises
------
ValueError
If invalid param is specified.
"""
if param not in self.params:
raise ValueError('Attempted to set invalid parameter: {}'.format(param))
self.params[param] = value
# Send parameter change to mReasoner
cmd = '(setf +{}+ {:f})'.format(param, value)
self.logger.debug('Param-Set: %s->%f:%s', param, value, cmd)
self._send(cmd)
def set_param_vec(self, params):
""" Directly set a vector of params.
Parameters
----------
params : list(float)
Vector of parameter values. Interpreted according to the order ['epsilon', 'lambda',
'omega', 'sigma'].
"""
param_names = ['epsilon', 'lambda', 'omega', 'sigma']
for name, value in zip(param_names, params):
self.set_param(name, value)
def _fit_fun(self, params, train_x, train_y, include_param=False):
""" Fitting helper function. Receives parameter values and computes accuracy on given
training and test data.
Parameters
----------
params : list(float)
List of parameters.
include_params : boolean
Flag to indicate that params are to be included in results.
Results
-------
float
Predictive accuracy.
list(float), optional
Parameters
"""
# Set the parameters
self.set_param_vec(params)
score = 0
for task, resp in zip(train_x, train_y):
pred = self.query(task)
score += 1/len(pred) if resp in pred else 0
inaccuracy = 1 - (score / len(train_x))
self.logger.debug('Fitting-Eval: (p=%s): %f', params, inaccuracy)
if include_param:
return inaccuracy, params
return inaccuracy
def fit(self, train_x, train_y, num_fits=10):
""" Fits mReasoner parameters to the specified data.
Parameters
----------
train_x : list(str)
List of syllogistic task encodings (e.g., 'AA1').
train_y : list(str)
List of syllogistic response encodings (e.g., 'Aac').
        num_fits : int
            Number of fitting runs (random restarts) to perform.
        Returns
        -------
        (float, list(float))
            Best inaccuracy score (-1 if every run failed) and the corresponding parameter vector.
        """
results = []
for idx in range(num_fits):
self.logger.debug('Starting fit %d/%d...', idx + 1, num_fits)
start_time = time.time()
# start_params = [x[1] for x in sorted(self.params.items())]
start_params = [np.random.uniform(lims[0], lims[1]) for lims in self.param_bounds]
res = so.minimize(
self._fit_fun,
start_params,
method='L-BFGS-B',
bounds=self.param_bounds,
args=(train_x, train_y))
if res.success:
self.logger.debug('Fitting iteration success:\n%s', res)
results.append((res.fun, res.x))
else:
self.logger.warning('Fitting iteration failed:\n%s', res)
            self.logger.debug('...fit took {:.4f}s'.format(time.time() - start_time))
if len(results) != num_fits:
self.logger.warning(
'%d/%d fitting runs unsuccessful', num_fits - len(results), num_fits)
# If all fits unsuccessful, use default parameters
if not results:
self.logger.warning('Fitting failed, setting to default params')
for param, value in self.default_params.items():
self.set_param(param, value)
return -1, [self.default_params[x] for x in ['epsilon', 'lambda', 'omega', 'sigma']]
# Obtain best parameter configuration
optim_score, optim_params = sorted(results, key=lambda x: x[0])[0]
self.set_param_vec(optim_params)
return optim_score, optim_params
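    # Fitting sketch, given an MReasoner instance (task/response encodings follow the
    # formats documented above; the data here is purely illustrative):
    #   score, params = mreasoner.fit(['AA1', 'EO3'], ['Aac', 'Ica'], num_fits=2)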
def fit_grid(self, train_x, train_y, num=10):
best_error = 2
best_params = None
for p_epsilon in np.linspace(*self.param_bounds[0], num):
for p_lambda in np.linspace(*self.param_bounds[1], num):
for p_omega in np.linspace(*self.param_bounds[2], num):
for p_sigma in np.linspace(*self.param_bounds[3], num):
params = [p_epsilon, p_lambda, p_omega, p_sigma]
start = time.time()
error = self._fit_fun(params, train_x, train_y)
self.logger.debug('Grid fit iteration took {:.2f}s'.format(
time.time() - start))
if error < best_error:
best_error = error
best_params = params
self.set_param_vec(best_params)
return best_error, best_params
def fit_rnd(self, train_x, train_y, num=10, old_params=None):
best_error = 2
best_params = None
if old_params:
best_params = old_params
best_error = self._fit_fun(best_params, train_x, train_y)
for _ in range(num):
p_epsilon = np.random.uniform(*self.param_bounds[0])
p_lambda = np.random.uniform(*self.param_bounds[1])
p_omega = np.random.uniform(*self.param_bounds[2])
p_sigma = np.random.uniform(*self.param_bounds[3])
params = [p_epsilon, p_lambda, p_omega, p_sigma]
error = self._fit_fun(params, train_x, train_y)
if error < best_error:
best_error = error
best_params = params
self.set_param_vec(best_params)
return best_error, best_params
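# Minimal usage sketch: the Clozure Common LISP path below is a placeholder assumption and
# must point to an actual ccl installation before this will run.
if __name__ == '__main__':
    mreasoner = MReasoner(ccl_path='/usr/local/bin/ccl64', mreasoner_dir=source_path())
    print(mreasoner.query('AA1'))
    mreasoner.terminate()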
|
class MacroCommand():
    def __init__(self, commands: list):
        self.commands = commands
    def __call__(self):
        for command in self.commands:
            command()
# Example usage: the commands below are illustrative placeholders; any zero-argument
# callables can be passed to MacroCommand.
macroCommand = MacroCommand([lambda: print("first"), lambda: print("second")])
macroCommand() |
import doctest
import insights.parsers.octavia as octavia_module
from insights.parsers.octavia import OctaviaConf, VALID_KEYS
from insights.tests import context_wrap
CONF_FILE = """
[DEFAULT]
# Print debugging output (set logging level to DEBUG instead of default WARNING level).
debug = False
# Plugin options are hot_plug_plugin (Hot-pluggable controller plugin)
octavia_plugins = hot_plug_plugin
# Hostname to be used by the host machine for services running on it.
# The default value is the hostname of the host machine.
host = some_hostname.some_domain.com
# AMQP Transport URL
# For Single Host, specify one full transport URL:
# transport_url = rabbit://<user>:<pass>@127.0.0.1:5672/<vhost>
# For HA, specify queue nodes in cluster, comma delimited:
# transport_url = rabbit://<user>:<pass>@server01,<user>:<pass>@server02/<vhost>
transport_url =
# How long in seconds to wait for octavia worker to exit before killing them.
graceful_shutdown_timeout = 60
log_file = some_file
log_dir = some_dir
policy_file = some_policy_file
[api_settings]
bind_host = 127.0.0.1
bind_port = 9876
# How should authentication be handled (keystone, noauth)
auth_strategy = keystone
allow_pagination = True
allow_sorting = True
pagination_max_limit = 1000
# Base URI for the API for use in pagination links.
# This will be autodetected from the request if not overridden here.
# Example:
# api_base_uri = http://localhost:9876
api_base_uri = http://localhost:9876
# Enable/disable ability for users to create TLS Terminated listeners
allow_tls_terminated_listeners = True
# Enable/disable ability for users to create PING type Health Monitors
allow_ping_health_monitors = True
# Dictionary of enabled provider driver names and descriptions
# A comma separated list of dictionaries of the enabled provider driver names
# and descriptions.
enabled_provider_drivers = amphora:The Octavia Amphora driver.,octavia: \\
Deprecated alias of the Octavia Amphora driver.
# Default provider driver
default_provider_driver = amphora
# The minimum health monitor delay interval for UDP-CONNECT Health Monitor type
udp_connect_min_interval_health_monitor = 3
[database]
# This line MUST be changed to actually run the plugin.
# Example:
# connection = mysql+pymysql://root:[email protected]:3306/octavia
# Replace 127.0.0.1 above with the IP address of the database used by the
# main octavia server. (Leave it as is if the database runs on this host.)
connection = mysql+pymysql://
# NOTE: In deployment the [database] section and its connection attribute may
# be set in the corresponding core plugin '.ini' file. However, it is suggested
# to put the [database] section and its connection attribute in this
# configuration file.
[health_manager]
bind_ip = 127.0.0.1
bind_port = 5555
# controller_ip_port_list example: 127.0.0.1:5555, 127.0.0.1:5555
controller_ip_port_list = 127.0.0.1:5555, 127.0.0.1:5555
failover_threads = 10
# status_update_threads will default to the number of processors on the host.
# This setting is deprecated and if you specify health_update_threads and
# stats_update_threads, they override this parameter.
status_update_threads = 10
# health_update_threads will default to the number of processors on the host
health_update_threads = 10
# stats_update_threads will default to the number of processors on the host
stats_update_threads = 10
heartbeat_interval = 10
# Symmetric encrpytion key
heartbeat_key =
heartbeat_timeout = 60
health_check_interval = 3
sock_rlimit = 0
# Health/StatsUpdate options are
# *_db
# *_logger
health_update_driver = health_db
stats_update_driver = stats_db
[keystone_authtoken]
# This group of config options are imported from keystone middleware. Thus the
# option names should match the names declared in the middleware.
# The www_authenticate_uri is the public endpoint and is returned in headers on a 401
# www_authenticate_uri = https://localhost:5000/v3
# The auth_url is the admin endpoint actually used for validating tokens
auth_url = https://localhost:5000/v3
username = octavia
password = password
project_name = service
# Domain names must be set, these are *not* default but work for most clouds
# project_domain_name = Default
user_domain_name = Default
insecure = False
cafile =
[certificates]
# Certificate Generator options are local_cert_generator
cert_generator = local_cert_generator
# For local certificate signing:
ca_certificate = /etc/ssl/certs/ssl-cert-snakeoil.pem
ca_private_key = /etc/ssl/private/ssl-cert-snakeoil.key
ca_private_key_passphrase =
server_certs_key_passphrase = do-not-use-this-key
signing_digest = sha256
cert_validity_time = 2592000 # 30 days = 30d * 24h * 60m * 60s = 2592000s
storage_path = /var/lib/octavia/certificates/
# For the TLS management
# Certificate Manager options are local_cert_manager
# barbican_cert_manager
# castellan_cert_manager
cert_manager = barbican_cert_manager
# For Barbican authentication (if using any Barbican based cert class)
barbican_auth = barbican_acl_auth
#
# Region in Identity service catalog to use for communication with the Barbican service.
region_name = some_region
#
# Endpoint type to use for communication with the Barbican service.
endpoint_type = publicURL
[networking]
# The maximum attempts to retry an action with the networking service.
max_retries = 15
# Seconds to wait before retrying an action with the networking service.
retry_interval = 1
# The maximum time to wait, in seconds, for a port to detach from an amphora
port_detach_timeout = 300
# Allow/disallow specific network object types when creating VIPs.
allow_vip_network_id = True
allow_vip_subnet_id = True
allow_vip_port_id = True
# List of network_ids that are valid for VIP creation.
# If this field empty, no validation is performed.
valid_vip_networks =
# List of reserved IP addresses that cannot be used for member addresses
# The default is the nova metadata service address
reserved_ips = ['169.254.169.254']
[haproxy_amphora]
base_path = /var/lib/octavia
base_cert_dir = /var/lib/octavia/certs
# Absolute path to a custom HAProxy template file
haproxy_template = /some/path
connection_logging = True
connection_max_retries = 120
connection_retry_interval = 5
build_rate_limit = -1
build_active_retries = 120
build_retry_interval = 5
# Maximum number of entries that can fit in the stick table.
# The size supports "k", "m", "g" suffixes.
haproxy_stick_size = 10k
# REST Driver specific
bind_host = 0.0.0.0
bind_port = 9443
#
# This setting is only needed with IPv6 link-local addresses (fe80::/64) are
# used for communication between Octavia and its Amphora, if IPv4 or other IPv6
# addresses are used it can be ignored.
lb_network_interface = o-hm0
#
haproxy_cmd = /usr/sbin/haproxy
respawn_count = 2
respawn_interval = 2
client_cert = /etc/octavia/certs/client.pem
server_ca = /etc/octavia/certs/server_ca.pem
#
# This setting is deprecated. It is now automatically discovered.
use_upstart = True
#
rest_request_conn_timeout = 10
rest_request_read_timeout = 60
#
# These "active" timeouts are used once the amphora should already
# be fully up and active. These values are lower than the other values to
# facilitate "fail fast" scenarios like failovers
active_connection_max_retries = 15
active_connection_rety_interval = 2
# The user flow log format for HAProxy.
# {{ project_id }} and {{ lb_id }} will be automatically substituted by the
# controller when configuring HAProxy if they are present in the string.
user_log_format = '{{ project_id }} {{ lb_id }} %f %ci %cp %t %{+Q}r %ST %B %U %[ssl_c_verify] %{+Q}[ssl_c_s_dn] %b %s %Tt %tsc'
[controller_worker]
workers = 1
amp_active_retries = 30
amp_active_wait_sec = 10
# Glance parameters to extract image ID to use for amphora. Only one of
# parameters is needed. Using tags is the recommended way to refer to images.
amp_image_id =
amp_image_tag =
# Optional owner ID used to restrict glance images to one owner ID.
# This is a recommended security setting.
amp_image_owner_id =
# Nova parameters to use when booting amphora
amp_flavor_id =
# Upload the ssh key as the service_auth user described elsewhere in this config.
# Leaving this variable blank will install no ssh key on the amphora.
amp_ssh_key_name =
amp_ssh_access_allowed = True
# Networks to attach to the Amphorae examples:
# - One primary network
# - - amp_boot_network_list = 22222222-3333-4444-5555-666666666666
# - Multiple networks
# - - amp_boot_network_list = 11111111-2222-33333-4444-555555555555, 22222222-3333-4444-5555-666666666666
# - All networks defined in the list will be attached to each amphora
amp_boot_network_list =
amp_secgroup_list =
client_ca = /etc/octavia/certs/ca_01.pem
# Amphora driver options are amphora_noop_driver,
# amphora_haproxy_rest_driver
#
amphora_driver = amphora_noop_driver
#
# Compute driver options are compute_noop_driver
# compute_nova_driver
#
compute_driver = compute_noop_driver
#
# Network driver options are network_noop_driver
# allowed_address_pairs_driver
#
network_driver = network_noop_driver
# Volume driver options are volume_noop_driver
# volume_cinder_driver
#
volume_driver = volume_noop_driver
#
# Distributor driver options are distributor_noop_driver
# single_VIP_amphora
#
distributor_driver = distributor_noop_driver
#
# Load balancer topology options are SINGLE, ACTIVE_STANDBY
loadbalancer_topology = SINGLE
user_data_config_drive = False
[task_flow]
# TaskFlow engine options are:
# - serial: Runs all tasks on a single thread.
# - parallel: Schedules tasks onto different threads to allow
# for running non-dependent tasks simultaneously
#
engine = parallel
max_workers = 5
#
# This setting prevents the controller worker from reverting taskflow flows.
# This will leave resources in an inconsistent state and should only be used
# for debugging purposes.
disable_revert = False
[oslo_messaging]
# Queue Consumer Thread Pool Size
rpc_thread_pool_size = 2
# Topic (i.e. Queue) Name
topic = octavia_prov
[oslo_middleware]
# HTTPProxyToWSGI middleware enabled
enable_proxy_headers_parsing = False
[house_keeping]
# Interval in seconds to initiate spare amphora checks
spare_check_interval = 30
spare_amphora_pool_size = 0
# Cleanup interval for Deleted amphora
cleanup_interval = 30
# Amphora expiry age in seconds. Default is 1 week
amphora_expiry_age = 604800
# Load balancer expiry age in seconds. Default is 1 week
load_balancer_expiry_age = 604800
[amphora_agent]
agent_server_ca = /etc/octavia/certs/client_ca.pem
agent_server_cert = /etc/octavia/certs/server.pem
# Defaults for agent_server_network_dir when not specified here are:
# Ubuntu: /etc/netns/amphora-haproxy/network/interfaces.d/
# Centos/fedora/rhel: /etc/netns/amphora-haproxy/sysconfig/network-scripts/
#
agent_server_network_dir =
agent_server_network_file =
agent_request_read_timeout = 180
# Minimum TLS protocol, eg: TLS, TLSv1.1, TLSv1.2, TLSv1.3 (if available)
agent_tls_protocol = TLSv1.2
# Amphora default UDP driver is keepalived_lvs
#
amphora_udp_driver = keepalived_lvs
##### Log offloading
#
# Note: The admin and tenant logs can point to the same endpoints.
#
# List of log server ip and port pairs for Administrative logs.
# Additional hosts are backup to the primary server. If none are
# specified, remote logging is disabled.
# Example 192.0.2.1:10514, 2001:db8:1::10:10514'
#
admin_log_targets =
#
# List of log server ip and port pairs for tenant traffic logs.
# Additional hosts are backup to the primary server. If none are
# specified, remote logging is disabled.
# Example 192.0.2.1:10514, 2001:db8:2::15:10514'
#
tenant_log_targets =
# Sets the syslog LOG_LOCAL[0-7] facility number for amphora log offloading.
# user_log_facility will receive the traffic flow logs.
# administrative_log_facility will receive the amphora processes logs.
# Note: Some processes only support LOG_LOCAL, so we are restricted to the
# LOG_LOCAL facilities.
#
user_log_facility = 0
administrative_log_facility = 1
# The log forwarding protocol to use. One of TCP or UDP.
log_protocol = UDP
# The maximum attempts to retry connecting to the logging host.
log_retry_count = 5
# The time, in seconds, to wait between retries connecting to the logging host.
log_retry_interval = 2
# The queue size (messages) to buffer log messages.
log_queue_size = 10000
# Controller local path to a custom logging configuration template.
# Currently this is an rsyslog configuration file template.
logging_template_override =
# When True, the amphora will forward all of the system logs (except tenant
# traffic logs) to the admin log target(s). When False, only amphora specific
# admin logs will be forwarded.
forward_all_logs = False
# When True, no logs will be written to the amphora filesystem. When False,
# log files will be written to the local filesystem.
disable_local_log_storage = False
[keepalived_vrrp]
# Amphora Role/Priority advertisement interval in seconds
vrrp_advert_int = 1
# Service health check interval and success/fail count
vrrp_check_interval = 5
vrrp_fail_count = 2
vrrp_success_count = 2
# Amphora MASTER gratuitous ARP refresh settings
vrrp_garp_refresh_interval = 5
vrrp_garp_refresh_count = 2
[service_auth]
memcached_servers =
cafile = /opt/stack/data/ca-bundle.pem
project_domain_name = Default
project_name = admin
user_domain_name = Default
password = password
username = admin
auth_type = password
auth_url = http://localhost:5555/
[nova]
# The name of the nova service in the keystone catalog
service_name =
# Custom nova endpoint if override is necessary
endpoint =
# Region in Identity service catalog to use for communication with the
# OpenStack services.
region_name =
# Endpoint type in Identity service catalog to use for communication with
# the OpenStack services.
endpoint_type = publicURL
# CA certificates file to verify neutron connections when TLS is enabled
ca_certificates_file =
# Disable certificate validation on SSL connections
insecure = False
# If non-zero, generate a random name of the length provided for each amphora,
# in the format "a[A-Z0-9]*".
# Otherwise, the default name format will be used: "amphora-{UUID}".
random_amphora_name_length = 0
#
# Availability zone to use for creating Amphorae
availability_zone =
# Enable anti-affinity in nova
enable_anti_affinity = False
# Set the anti-affinity policy to what is suitable.
# Nova supports: anti-affinity and soft-anti-affinity
anti_affinity_policy = anti-affinity
[cinder]
# The name of the cinder service in the keystone catalog
service_name =
# Custom cinder endpoint if override is necessary
endpoint =
# Region in Identity service catalog to use for communication with the
# OpenStack services.
region_name =
# Endpoint type in Identity service catalog to use for communication with
# the OpenStack services.
endpoint_type = publicURL
# Availability zone to use for creating Volume
availability_zone =
# CA certificates file to verify cinder connections when TLS is enabled
insecure = False
ca_certificates_file =
# Size of root volume in GB for Amphora Instance when use Cinder
# In some storage backends such as ScaleIO, the volume size must be a multiple of 8
volume_size = 16
# Volume type to be used for Amphora Instance root disk
# If not specified, default_volume_type from cinder.conf will be used
volume_type =
# Interval time to wait until volume becomes available
volume_create_retry_interval = 5
# Timeout to wait for volume creation success
volume_create_timeout = 300
# Maximum number of retries to create volume
volume_create_max_retries = 5
[glance]
# The name of the glance service in the keystone catalog
service_name =
# Custom glance endpoint if override is necessary
endpoint =
# Region in Identity service catalog to use for communication with the
# OpenStack services.
region_name =
# Endpoint type in Identity service catalog to use for communication with
# the OpenStack services.
endpoint_type = publicURL
# CA certificates file to verify neutron connections when TLS is enabled
insecure = False
ca_certificates_file =
[neutron]
# The name of the neutron service in the keystone catalog
service_name =
# Custom neutron endpoint if override is necessary
endpoint =
# Region in Identity service catalog to use for communication with the
# OpenStack services.
region_name =
# Endpoint type in Identity service catalog to use for communication with
# the OpenStack services.
endpoint_type = publicURL
# CA certificates file to verify neutron connections when TLS is enabled
insecure = False
ca_certificates_file =
[quotas]
default_load_balancer_quota = -1
default_listener_quota = -1
default_member_quota = -1
default_pool_quota = -1
default_health_monitor_quota = -1
[audit]
# Enable auditing of API requests.
enabled = False
# Path to audit map file for octavia-api service. Used only
# when API audit is enabled.
audit_map_file = /etc/octavia/octavia_api_audit_map.conf
# Comma separated list of REST API HTTP methods to be
# ignored during audit. For example: auditing will not be done
# on any GET or POST requests if this is set to "GET,POST". It
# is used only when API audit is enabled.
ignore_req_list =
[audit_middleware_notifications]
# Note: This section comes from openstack/keystonemiddleware
# It is included here for documentation convenience and may be out of date
# Indicate whether to use oslo_messaging as the notifier. If set to False,
# the local logger will be used as the notifier. If set to True, the
# oslo_messaging package must also be present. Otherwise, the local logger
# will be used instead.
use_oslo_messaging = True
# The Driver to handle sending notifications. Possible values are messaging,
# messagingv2, routing, log, test, noop. If not specified, then value from
# oslo_messaging_notifications conf section is used.
driver =
# List of AMQP topics used for OpenStack notifications. If not specified,
# then value from oslo_messaging_notifications conf section is used.
topics =
# A URL representing messaging driver to use for notification. If not
# specified, we fall back to the same configuration used for RPC.
transport_url =
[driver_agent]
status_socket_path = /var/run/octavia/status.sock
stats_socket_path = /var/run/octavia/stats.sock
get_socket_path = /var/run/octavia/get.sock
# Maximum time to wait for a status message before checking for shutdown
status_request_timeout = 5
# Maximum number of status processes per driver-agent
status_max_processes = 50
# Maximum time to wait for a stats message before checking for shutdown
stats_request_timeout = 5
# Maximum number of stats processes per driver-agent
stats_max_processes = 50
# Percentage of max_processes (both status and stats) in use to start
# logging warning messages about an overloaded driver-agent.
max_process_warning_percent = .75
# How long in seconds to wait for provider agents to exit before killing them.
provider_agent_shutdown_timeout = 60
# List of enabled provider agents.
enabled_provider_agents =
"""
DEFAULT_OPTIONS = set([
'debug', 'octavia_plugins', 'graceful_shutdown_timeout',
'log_file', 'log_dir', 'policy_file'
])
def test_full_conf():
# Simulate filtering to allow testing filtered data
filtered_content = []
for line in CONF_FILE.strip().splitlines():
if any([f in line for f in VALID_KEYS]):
filtered_content.append(line)
octavia_conf = OctaviaConf(context_wrap('\n'.join(filtered_content)))
assert octavia_conf is not None
assert set(octavia_conf.defaults().keys()) == DEFAULT_OPTIONS
assert octavia_conf.defaults()['debug'] == 'False'
assert octavia_conf.defaults()['octavia_plugins'] == 'hot_plug_plugin'
assert 'api_settings' in octavia_conf
assert set(octavia_conf.items('api_settings').keys()) == set([
'bind_host', 'bind_port', 'auth_strategy', 'allow_pagination', 'allow_sorting',
'pagination_max_limit', 'api_base_uri', 'allow_tls_terminated_listeners',
'allow_ping_health_monitors', 'enabled_provider_drivers', 'default_provider_driver',
'udp_connect_min_interval_health_monitor'
]) | DEFAULT_OPTIONS
assert 'database' in octavia_conf
assert set(octavia_conf.items('database').keys()) == DEFAULT_OPTIONS
assert 'health_manager' in octavia_conf
assert set(octavia_conf.items('health_manager').keys()) == set([
'bind_ip', 'bind_port', 'controller_ip_port_list', 'failover_threads',
'status_update_threads', 'health_update_threads', 'stats_update_threads',
'heartbeat_interval', 'heartbeat_timeout', 'health_check_interval',
'sock_rlimit', 'health_update_driver', 'stats_update_driver'
]) | DEFAULT_OPTIONS
assert 'keystone_authtoken' in octavia_conf
assert set(octavia_conf.items('keystone_authtoken').keys()) == set(['insecure', 'cafile']) | DEFAULT_OPTIONS
assert 'certificates' in octavia_conf
assert set(octavia_conf.items('certificates').keys()) == set([
'cert_generator', 'signing_digest', 'cert_validity_time', 'storage_path',
'cert_manager', 'region_name', 'endpoint_type'
]) | DEFAULT_OPTIONS
assert 'networking' in octavia_conf
assert set(octavia_conf.items('networking').keys()) == set([
'max_retries', 'retry_interval', 'port_detach_timeout', 'allow_vip_network_id',
'allow_vip_subnet_id', 'allow_vip_port_id', 'reserved_ips'
]) | DEFAULT_OPTIONS
assert 'haproxy_amphora' in octavia_conf
assert set(octavia_conf.items('haproxy_amphora').keys()) == set([
'base_path',
'base_cert_dir',
'haproxy_template',
'connection_logging',
'connection_max_retries',
'connection_retry_interval',
'build_rate_limit',
'build_active_retries',
'build_retry_interval',
'haproxy_stick_size',
'bind_host',
'bind_port',
'lb_network_interface',
'haproxy_cmd',
'respawn_count',
'respawn_interval',
'client_cert',
'server_ca',
'use_upstart',
'rest_request_conn_timeout',
'rest_request_read_timeout',
'active_connection_max_retries',
'active_connection_rety_interval',
'user_log_format',
]) | DEFAULT_OPTIONS
assert 'controller_worker' in octavia_conf
assert set(octavia_conf.items('controller_worker').keys()) == set([
'workers',
'amp_active_retries',
'amp_active_wait_sec',
'amp_image_id',
'amp_image_tag',
'amp_image_owner_id',
'amp_flavor_id',
'amp_boot_network_list',
'amp_secgroup_list',
'amp_ssh_access_allowed',
'client_ca',
'amphora_driver',
'compute_driver',
'network_driver',
'volume_driver',
'distributor_driver',
'loadbalancer_topology',
'user_data_config_drive',
]) | DEFAULT_OPTIONS
assert 'task_flow' in octavia_conf
assert set(octavia_conf.items('task_flow').keys()) == set([
'engine',
'max_workers',
'disable_revert',
]) | DEFAULT_OPTIONS
assert 'oslo_messaging' in octavia_conf
assert set(octavia_conf.items('oslo_messaging').keys()) == set([
'rpc_thread_pool_size',
'topic',
]) | DEFAULT_OPTIONS
assert 'oslo_middleware' in octavia_conf
assert set(octavia_conf.items('oslo_middleware').keys()) == set([
'enable_proxy_headers_parsing',
]) | DEFAULT_OPTIONS
assert 'house_keeping' in octavia_conf
assert set(octavia_conf.items('house_keeping').keys()) == set([
'spare_check_interval',
'spare_amphora_pool_size',
'cleanup_interval',
'amphora_expiry_age',
'load_balancer_expiry_age',
]) | DEFAULT_OPTIONS
assert 'amphora_agent' in octavia_conf
assert set(octavia_conf.items('amphora_agent').keys()) == set([
'agent_server_ca',
'agent_server_cert',
'agent_server_network_dir',
'agent_server_network_file',
'agent_request_read_timeout',
'agent_tls_protocol',
'amphora_udp_driver',
'admin_log_targets',
'tenant_log_targets',
'user_log_facility',
'administrative_log_facility',
'log_protocol',
'log_retry_count',
'log_retry_interval',
'log_queue_size',
'logging_template_override',
'forward_all_logs',
'disable_local_log_storage',
]) | DEFAULT_OPTIONS
assert 'keepalived_vrrp' in octavia_conf
assert set(octavia_conf.items('keepalived_vrrp').keys()) == set([
'vrrp_advert_int',
'vrrp_check_interval',
'vrrp_fail_count',
'vrrp_success_count',
'vrrp_garp_refresh_interval',
'vrrp_garp_refresh_count',
]) | DEFAULT_OPTIONS
assert 'service_auth' in octavia_conf
assert set(octavia_conf.items('service_auth').keys()) == set([
'memcached_servers',
'cafile',
'auth_type',
]) | DEFAULT_OPTIONS
assert 'nova' in octavia_conf
assert set(octavia_conf.items('nova').keys()) == set([
'service_name',
'region_name',
'endpoint_type',
'ca_certificates_file',
'insecure',
'random_amphora_name_length',
'availability_zone',
'enable_anti_affinity',
'anti_affinity_policy',
]) | DEFAULT_OPTIONS
assert 'cinder' in octavia_conf
assert set(octavia_conf.items('cinder').keys()) == set([
'service_name',
'region_name',
'endpoint_type',
'availability_zone',
'insecure',
'ca_certificates_file',
'volume_size',
'volume_type',
'volume_create_retry_interval',
'volume_create_timeout',
'volume_create_max_retries',
]) | DEFAULT_OPTIONS
assert 'glance' in octavia_conf
assert set(octavia_conf.items('glance').keys()) == set([
'service_name',
'region_name',
'endpoint_type',
'insecure',
'ca_certificates_file',
]) | DEFAULT_OPTIONS
assert 'neutron' in octavia_conf
assert set(octavia_conf.items('neutron').keys()) == set([
'service_name',
'region_name',
'endpoint_type',
'insecure',
'ca_certificates_file',
]) | DEFAULT_OPTIONS
assert 'quotas' in octavia_conf
assert set(octavia_conf.items('quotas').keys()) == set([
'default_load_balancer_quota',
'default_listener_quota',
'default_member_quota',
'default_pool_quota',
'default_health_monitor_quota',
]) | DEFAULT_OPTIONS
assert 'audit' in octavia_conf
assert set(octavia_conf.items('audit').keys()) == set([
'enabled',
'audit_map_file',
'ignore_req_list',
]) | DEFAULT_OPTIONS
assert 'audit_middleware_notifications' in octavia_conf
assert set(octavia_conf.items('audit_middleware_notifications').keys()) == set([
'use_oslo_messaging',
'driver',
'topics',
]) | DEFAULT_OPTIONS
assert 'driver_agent' in octavia_conf
assert set(octavia_conf.items('driver_agent').keys()) == set([
'status_socket_path',
'stats_socket_path',
'get_socket_path',
'status_request_timeout',
'status_max_processes',
'stats_request_timeout',
'stats_max_processes',
'max_process_warning_percent',
'provider_agent_shutdown_timeout',
'enabled_provider_agents',
]) | DEFAULT_OPTIONS
def test_doc_examples():
env = {
'octavia_conf': OctaviaConf(context_wrap(CONF_FILE)),
}
failed, total = doctest.testmod(octavia_module, globs=env)
assert failed == 0
|
from datetime import timedelta
from unittest import TestCase
from PyProjManCore.proj_stats import ProjStats
from PyProjManCore.task import Task
class TestProjStats(TestCase):
def test_init(self):
stat = ProjStats()
self.assertIsInstance(stat, ProjStats)
def test_name_constructor(self):
name = "Project Name"
stat = ProjStats(name)
self.assertEqual(name, stat.name)
def test_name_attribute(self):
name = "Project Name"
stat = ProjStats()
stat.name = name
self.assertEqual(name, stat.name)
def test_first(self):
stat = ProjStats()
first = Task("First")
stat.first = first
self.assertEqual(first, stat.first)
    def test_last(self):
        stat = ProjStats()
        last = Task("Last")
        stat.last = last
        self.assertEqual(last, stat.last)
def test_count(self):
stat = ProjStats()
count = 5
stat.count = count
self.assertEqual(count, stat.count)
def test_duration(self):
stat = ProjStats()
duration = timedelta(days=5, hours=10, minutes=3)
stat.duration = duration
self.assertEqual(duration, stat.duration)
def test_tasks_list(self):
stat = ProjStats()
tasks = ["Task 1", "Task 2", "Task 3"]
stat.tasks = tasks
self.assertListEqual(tasks, stat.tasks)
def test_append_to_tasks(self):
stat = ProjStats()
tasks = ["Task 1", "Task 2", "Task 3"]
stat.tasks = tasks
new_task = "New Task"
stat.append_task(new_task)
self.assertIn(new_task, stat.tasks)
def test_delete_from_tasks(self):
stat = ProjStats()
tasks = ["Task 1", "Task 2", "Task 3"]
stat.tasks = tasks
removed_task = "Task 2"
stat.remove_task(removed_task)
self.assertNotIn(removed_task, stat.tasks)
|
#!/usr/bin/python
import git
import sys
import tempfile
import unidiff
from io import StringIO
from plugins import IBPlugin
class UnfuckPatch(object):
"""
Contains the logic to call plugins that reverts unnecessary changes
to the repository.
>>> unfuck = UnfuckPatch(".")
>>> unfuck.clear()
"""
default_processors = [
IBPlugin.process_rect, IBPlugin.process_size, IBPlugin.process_point,
IBPlugin.process_animations
]
def __init__(self, path):
self.repository = git.Repo(path)
def _clear_patch(self, patch, processors):
has_changes = False
for i, patch_piece in enumerate(patch):
length = len(patch_piece)
for j, hunk in enumerate(patch_piece[::-1]):
if not all(p(hunk) for p in processors):
continue
del patch[i][length - j - 1]
has_changes = has_changes or len(patch[i]) > 0
return has_changes
def clear(self, processors=None):
"""
        Starts the process of cleaning unnecessary changes using the given
        processors. If no processor is given, the default ones are used.
        Processors are functions that receive a hunk and return
        `True` or `False`; when any processor returns `False`, the hunk is
        reverted from the working tree.
"""
processors = processors or self.default_processors
index = self.repository.index
patches = index.diff(None, create_patch=True, unified=0)
for patch in patches:
try:
patch = unidiff.PatchSet(StringIO(patch.diff.decode('utf-8')))
except Exception as e:
print("Unhandled error %s, continuing..." % str(e))
continue
if self._clear_patch(patch, processors):
patchpath = tempfile.mktemp()
                with open(patchpath, 'w') as patch_file:
                    patch_file.write(str(patch) + '\n')
self.repository.git.execute(
['git', 'apply', '--recount', '-R', '--unidiff-zero',
'--allow-overlap', patchpath]
)
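# A hedged sketch of a custom processor (not one of the bundled IBPlugin
# processors). Per the contract documented in clear(): processors receive a
# unidiff hunk, and returning False marks the hunk as an unnecessary change
# that should be reverted from the working tree.
def whitespace_only_reverter(hunk):
    """Hypothetical example: revert hunks whose added and removed lines
    differ only in leading/trailing whitespace."""
    added = ''.join(line.value.strip() for line in hunk if line.is_added)
    removed = ''.join(line.value.strip() for line in hunk if line.is_removed)
    return added != removed  # True keeps the change, False reverts it
# Hypothetical usage:
#   UnfuckPatch(".").clear(
#       processors=UnfuckPatch.default_processors + [whitespace_only_reverter])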
def main():
repository_path = sys.argv[1] if len(sys.argv) == 2 else "."
try:
unfuck = UnfuckPatch(repository_path)
except git.exc.InvalidGitRepositoryError:
print("Error: Current path is not a git repository\n")
print("Usage: %s <repository path>" % sys.argv[0])
unfuck.clear()
if __name__ == "__main__":
main()
|
import os
import sys
from unittest import expectedFailure
from ..utils import TranspileTestCase, NotImplementedToExpectedFailure
class TimeModuleTests(NotImplementedToExpectedFailure, TranspileTestCase):
#######################################################
# _STRUCT_TM_ITEMS
@expectedFailure
def test__STRUCT_TM_ITEMS(self):
self.assertCodeExecution("""
import time
print(time._STRUCT_TM_ITEMS)
""")
#######################################################
# __doc__
def test___doc__(self):
if sys.hexversion > 0x03060400:
# Docstring was truncated in Python 3.6.4
substitutions = {
'': [
"\n" +
"Variables:\n" +
"\n" +
"timezone -- difference in seconds between UTC and local standard time\n" +
"altzone -- difference in seconds between UTC and local DST time\n" +
"daylight -- whether local time should reflect DST\n" +
"tzname -- tuple of (standard time zone name, DST time zone name)\n" +
"\n" +
"Functions:\n" +
"\n" +
"time() -- return current time in seconds since the Epoch as a float\n" +
"clock() -- return CPU time since process start as a float\n" +
"sleep() -- delay for a number of seconds given as a float\n" +
"gmtime() -- convert seconds since Epoch to UTC tuple\n" +
"localtime() -- convert seconds since Epoch to local time tuple\n" +
"asctime() -- convert time tuple to string\n" +
"ctime() -- convert time in seconds to string\n" +
"mktime() -- convert local time tuple to seconds since Epoch\n" +
"strftime() -- convert time tuple to string according to format specification\n" +
"strptime() -- parse string to time tuple according to format specification\n" +
"tzset() -- change the local timezone"
]
}
else:
substitutions = None
self.assertCodeExecution(
"""
import time
print(time.__doc__)
""",
substitutions=substitutions
)
#######################################################
# __file__
@expectedFailure
def test___file__(self):
self.assertCodeExecution("""
import time
print(time.__file__)
""")
#######################################################
# __loader__
@expectedFailure
def test___loader__(self):
self.assertCodeExecution("""
import time
print(time.__loader__)
""")
#######################################################
# __name__
def test___name__(self):
self.assertCodeExecution("""
import time
print(time.__name__)
""")
#######################################################
# __package__
def test___package__(self):
self.assertCodeExecution("""
import time
print(time.__package__)
""")
#######################################################
# __spec__
@expectedFailure
def test___spec__(self):
self.assertCodeExecution("""
import time
print(time.__spec__)
""")
#######################################################
# altzone
@expectedFailure
def test_altzone(self):
self.assertCodeExecution("""
import time
print(time.altzone)
""")
#######################################################
# asctime
@expectedFailure
def test_asctime(self):
self.assertCodeExecution("""
import time
print(time.asctime())
""")
#######################################################
# clock
def test_clock(self):
# Since we can't know exactly what CPU time will be used,
# and CPU time will vary between implementations,
        # this test validates that clock returns a float < 0.1s
sleepy_time = 1
diff_offset = sleepy_time if os.name == 'nt' else 0
# On Windows, time.clock includes the time spent in time.sleep
# however on Unix it does not.
self.assertCodeExecution("""
import time
start = time.clock()
time.sleep({sleepy_time})
end = time.clock()
diff = end - start - {diff_offset}
print(type(diff))
print(diff < 0.1)
""".format(sleepy_time=sleepy_time, diff_offset=diff_offset))
#######################################################
# ctime
def test_ctime(self):
self.assertCodeExecution("""
import time
print(time.ctime()[:10], time.ctime()[-4:])
""")
def test_ctime_with_parameter(self):
self.assertCodeExecution("""
import time
print(time.ctime(0))
print(time.ctime(1000))
now = time.time()
print(time.ctime((now - (now % 3600))))
print(time.ctime(1000.67))
try:
time.ctime('today')
except Exception as e:
print(e)
try:
time.ctime([1,2])
except Exception as e:
print(e)
try:
time.ctime((1,2))
except Exception as e:
print(e)
time.ctime(None)
""")
#######################################################
# daylight
@expectedFailure
def test_daylight(self):
self.assertCodeExecution("""
import time
print(time.daylight)
""")
#######################################################
# get_clock_info
@expectedFailure
def test_get_clock_info(self):
self.assertCodeExecution("""
import time
print(time.get_clock_info())
""")
#######################################################
# gmtime
@expectedFailure
def test_gmtime(self):
self.assertCodeExecution("""
import time
print(time.gmtime())
""")
#######################################################
# localtime
@expectedFailure
def test_localtime(self):
self.assertCodeExecution("""
import time
print(time.localtime())
""")
#######################################################
# mktime
@expectedFailure
def test_mktime(self):
self.assertCodeExecution("""
import time
print(time.mktime())
""")
#######################################################
# monotonic
@expectedFailure
def test_monotonic(self):
self.assertCodeExecution("""
import time
print(time.monotonic())
""")
#######################################################
# perf_counter
@expectedFailure
def test_perf_counter(self):
self.assertCodeExecution("""
import time
print(time.perf_counter())
""")
#######################################################
# process_time
@expectedFailure
def test_process_time(self):
self.assertCodeExecution("""
import time
print(time.process_time())
""")
#######################################################
# sleep
def test_sleep(self):
self.assertCodeExecution("""
import time
print(time.sleep(1))
""")
#######################################################
# strftime
@expectedFailure
def test_strftime(self):
self.assertCodeExecution("""
import time
print(time.strftime())
""")
#######################################################
# strptime
@expectedFailure
def test_strptime(self):
self.assertCodeExecution("""
import time
print(time.strptime())
""")
#######################################################
# struct_time
@expectedFailure
def test_struct_time(self):
self.assertCodeExecution("""
import time
print(time.struct_time())
""")
#######################################################
# time
def test_time(self):
self.assertCodeExecution("""
import time
print(int(time.time() / 10000))
""")
#######################################################
# timezone
@expectedFailure
def test_timezone(self):
self.assertCodeExecution("""
import time
print(time.timezone)
""")
#######################################################
# tzname
@expectedFailure
def test_tzname(self):
self.assertCodeExecution("""
import time
print(time.tzname)
""")
#######################################################
# tzset
@expectedFailure
def test_tzset(self):
self.assertCodeExecution("""
import time
print(time.tzset())
""")
not_implemented_versions = {
'test_clock': (3.7, )
}
|
"""
Proxy a configuration value. Defers the lookup until the value is used, so that
values can be read statically at import time.
"""
import functools
import operator
from staticconf import errors
import six
class UndefToken(object):
"""A token to represent an undefined value, so that None can be used
as a default value.
"""
def __repr__(self):
return "<Undefined>"
UndefToken = UndefToken()
_special_names = [
'__abs__', '__add__', '__and__', '__bool__', '__call__', '__cmp__',
'__coerce__',
'__contains__', '__delitem__', '__delslice__', '__div__', '__divmod__',
'__eq__', '__float__', '__floordiv__', '__ge__', '__getitem__',
'__getslice__', '__gt__', '__hash__', '__hex__', '__iadd__', '__iand__',
'__idiv__', '__idivmod__', '__ifloordiv__', '__ilshift__', '__imod__',
'__imul__', '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
'__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__', '__len__',
'__long__', '__lshift__', '__lt__', '__mod__', '__mul__', '__ne__',
'__neg__', '__oct__', '__or__', '__pos__', '__pow__', '__radd__',
'__rand__', '__rdiv__', '__rdivmod__',
    '__repr__', '__reversed__', '__rfloordiv__', '__rlshift__', '__rmod__',
'__rmul__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__',
'__rtruediv__', '__rxor__', '__setitem__', '__setslice__', '__sub__',
'__truediv__', '__xor__', 'next', '__nonzero__', '__str__', '__unicode__',
'__index__', '__fspath__',
]
def identity(x):
return x
unary_funcs = {
'__unicode__': six.text_type,
'__str__': str,
'__fspath__': identity, # python3.6+ os.PathLike interface
'__repr__': repr,
'__nonzero__': bool, # Python2 bool
'__bool__': bool, # Python3 bool
'__hash__': hash,
}
def build_class_def(cls):
def build_method(name):
def method(self, *args, **kwargs):
if name in unary_funcs:
return unary_funcs[name](self.value)
if hasattr(operator, name):
return getattr(operator, name)(self.value, *args)
return getattr(self.value, name)(*args, **kwargs)
return method
namespace = dict((name, build_method(name)) for name in _special_names)
return type(cls.__name__, (cls,), namespace)
def cache_as_field(cache_name):
"""Cache a functions return value as the field 'cache_name'."""
def cache_wrapper(func):
@functools.wraps(func)
def inner_wrapper(self, *args, **kwargs):
value = getattr(self, cache_name, UndefToken)
if value != UndefToken:
return value
ret = func(self, *args, **kwargs)
setattr(self, cache_name, ret)
return ret
return inner_wrapper
return cache_wrapper
def extract_value(proxy):
"""Given a value proxy type, Retrieve a value from a namespace, raising
exception if no value is found, or the value does not validate.
"""
value = proxy.namespace.get(proxy.config_key, proxy.default)
if value is UndefToken:
raise errors.ConfigurationError("%s is missing value for: %s" %
(proxy.namespace, proxy.config_key))
try:
return proxy.validator(value)
except errors.ValidationError as e:
raise errors.ConfigurationError("%s failed to validate %s: %s" %
(proxy.namespace, proxy.config_key, e))
class ValueProxy(object):
"""Proxy a configuration value so it can be loaded after import time."""
__slots__ = [
'validator',
'config_key',
'default',
'_value',
'namespace',
'__weakref__'
]
@classmethod
@cache_as_field('_class_def')
def get_class_def(cls):
return build_class_def(cls)
def __new__(cls, *args, **kwargs):
"""Create instances of this class with proxied special names."""
klass = cls.get_class_def()
instance = object.__new__(klass)
klass.__init__(instance, *args, **kwargs)
return instance
def __init__(self, validator, namespace, key, default=UndefToken):
self.validator = validator
self.config_key = key
self.default = default
self.namespace = namespace
self._value = UndefToken
@cache_as_field('_value')
def get_value(self):
return extract_value(self)
value = property(get_value)
def __getattr__(self, item):
return getattr(self.value, item)
def reset(self):
"""Clear the cached value so that configuration can be reloaded."""
self._value = UndefToken
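# A minimal, hedged sketch of the deferred lookup that ValueProxy provides.
# It assumes a plain dict as the namespace, which is enough to satisfy the
# .get() call in extract_value; real code would use a staticconf namespace.
def _example_deferred_lookup():  # hypothetical helper, not part of the public API
    namespace = {'max_retries': '3'}
    proxy = ValueProxy(int, namespace, 'max_retries')
    assert int(proxy) == 3   # first use runs the validator and caches the result
    namespace['max_retries'] = '5'
    assert int(proxy) == 3   # the cached value is still returned
    proxy.reset()            # clearing the cache picks up the new value
    assert int(proxy) == 5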
|
from uci_comparison import compare_estimators
from sklearn.ensemble.forest import RandomForestClassifier, ExtraTreesClassifier
from rr_forest import RRForestClassifier
from rr_extra_forest import RRExtraTreesClassifier
estimators = {
'RandomForest': RandomForestClassifier(n_estimators=20),
'RndRotForest': RRForestClassifier(n_estimators=20),
'ExtraTrees': ExtraTreesClassifier(n_estimators=20),
'RndRotETrees': RRExtraTreesClassifier(n_estimators=20),
}
# optionally, pass a list of UCI dataset identifiers as the datasets parameter, e.g. datasets=['iris', 'diabetes']
# optionally, pass a dict of scoring functions as the metric parameter, e.g. metrics={'F1-score': f1_score}
compare_estimators(estimators) |
#!/usr/bin/env python3
from collections import Counter
import itertools
class SeatMap:
"""Seat map class."""
def __init__(self, data):
"""Init."""
self.data = data
self.width = len(data[0]) - 1
self.length = len(data) - 1
def render(self, data=None):
"""Render data as floor map."""
if not data:
data = self.data
for line in data:
print(''.join(line))
print()
def run(self):
"""Run exercise."""
output = []
for y, row in enumerate(self.data):
new_row = []
for x, seat in enumerate(row):
seats = self.adj_seats(x, y)
occupied = seats.get('#', 0)
if seat == 'L':
if occupied == 0:
new_row.append('#')
else:
new_row.append(seat)
elif seat == '#':
if occupied >= 4:
new_row.append('L')
else:
new_row.append(seat)
else:
new_row.append(seat)
output.append(new_row)
return output
    def adj_seats(self, x, y):
        """Find adjacent seats."""
        seats = []
        if y > 0:
            seats.append(self.data[y-1][x])  # up
        if y > 0 and x > 0:
            seats.append(self.data[y-1][x-1])  # up left
        if y < self.length:
            seats.append(self.data[y+1][x])  # down
        if y < self.length and x > 0:
            seats.append(self.data[y+1][x-1])  # down left
        if x > 0:
            seats.append(self.data[y][x-1])  # left
        if x < self.width:
            seats.append(self.data[y][x+1])  # right
        if y < self.length and x < self.width:
            seats.append(self.data[y+1][x+1])  # down right
        if y > 0 and x < self.width:
            seats.append(self.data[y-1][x+1])  # up right
        return Counter(seats)
def occupied_seats(self):
"""Get empty seats from map."""
c = Counter(itertools.chain(*self.data))
return c.get('#', 0)
def main():
"""Main"""
with open('input.txt') as fp:
data = [[s for s in line] for line in fp.read().splitlines()]
s = SeatMap(data)
old = None
while True:
new = s.run()
if new == old:
s.render(new)
print('part 1 answer:', s.occupied_seats())
break
s.data = new
old = new
if __name__ == '__main__':
main()
|
"""Standalone tool to print QR codes.
Usage:
QRCodePrinter.py -p=<URL_prefix> -s <nnn> -c <nnn> -o <file>
QRCodePrinter.py -h | --help
QRCodePrinter.py --version
Options:
-p <URL_prefix>, --prefix=<URL_prefix> The URL prefix for the box number
-s <nnn>, --start=<nnn> Starting box number to use
-c <nnn>, --count=<nnn> Number of QR codes to print
-o <file>, --output=<file> Output file name
-h, --help Show this help and quit.
-v, --version Show the version of this program and quit.
"""
import logging
import logging.config
from dataclasses import dataclass, astuple, InitVar
from logging import getLogger, debug, error
from pathlib import Path
from typing import Any, Union, Optional, NamedTuple, List
from docopt import docopt
import pyqrcode
import png
import reportlab
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import inch
from reportlab.platypus import Image
from sqlalchemy import create_engine, MetaData, Table
from sqlalchemy.sql import select
import yaml # from PyYAML library
from FPIDjango.private import settings_private
__author__ = 'Travis Risner'
__project__ = "Food-Pantry-Inventory"
__creation_date__ = "05/22/2019"
# Copyright 2019 by Travis Risner - MIT License
"""
Assuming:
- letter size paper
- portrait orientation
- 1/2 inch outer margin on all sides
- all measurements in points (1 pt = 1/72 in)
- 3 labels across
- 4 labels down
- each label has 1/4 in margin on all sides
- 0, 0 of axis is in lower left corner
"""
log = None
@dataclass()
class Point:
"""
Horizontal (x) and vertical (y) coordinate.
"""
x: int
y: int
LABEL_SIZE: Point = Point(144, 144) # 2 in x 2 in
LABEL_MARGIN: Point = Point(18, 18) # 1/4 in x 1/4 in
BACKGROUND_SIZE: Point = Point(
LABEL_SIZE.x + (LABEL_MARGIN.x * 2),
LABEL_SIZE.y + (LABEL_MARGIN.y * 2))
PAGE_OFFSET: Point = Point(36, 36) # 1/2 in x 1/2 in
TITLE_ADJUSTMENT: Point = Point(+20, -9)
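# Worked out, these measurements tile a letter page (612 x 792 pt) exactly:
# each label background is 144 + 2*18 = 180 pt square, so three across use
# 3*180 + 2*36 (page offset) = 612 pt = 8.5 in, and four down use
# 4*180 + 2*36 = 792 pt = 11 in.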
@dataclass
class LabelPosition:
"""
Container for measurements for one label.
All measurements are in points.
x denotes horizontal measurement
y denotes vertical
origin is in lower left corner
label is assumed to be 2 in x 2 in ( 144 pt x 144 pt)
"""
page_offset: InitVar[Point]
lower_left_offset: Point = Point(0, 0)
lower_right_offset: Point = Point(0, 0)
upper_left_offset: Point = Point(0, 0)
upper_right_offset: Point = Point(0, 0)
offset_on_page: Point = Point(0, 0)
image_start: Point = Point(0, 0)
title_start: Point = Point(0, 0)
def __post_init__(self, page_offset: Point):
"""
Adjust offsets based on offset_on_page.
:param page_offset: offset (in points) from the lower left corner
:return:
"""
self.offset_on_page = page_offset
x: int = page_offset.x
y: int = page_offset.y
offset: Point = Point(x, y)
self.lower_left_offset = offset
x = page_offset.x + BACKGROUND_SIZE.x
y = page_offset.y
offset: Point = Point(x, y)
self.lower_right_offset = offset
x = page_offset.x
y = page_offset.y + BACKGROUND_SIZE.y
offset: Point = Point(x, y)
self.upper_left_offset = offset
x = page_offset.x + BACKGROUND_SIZE.x
y = page_offset.y + BACKGROUND_SIZE.y
offset: Point = Point(x, y)
self.upper_right_offset = offset
x = self.lower_left_offset.x + LABEL_MARGIN.x
y = self.lower_left_offset.y + LABEL_MARGIN.y
self.image_start: Point = Point(x, y)
# title placement calculation
x = self.upper_left_offset.x + (LABEL_SIZE.x // 2)
y = self.upper_left_offset.y - LABEL_MARGIN.y
self.title_start: Point = Point(x, y)
return
class QRCodePrinterClass:
"""
QRCodePrinterClass - Print QR Codes
"""
def __init__(self, workdir: Path):
self.working_dir: Path = None
self.url_prefix: str = ''
self.box_start: int = 0
self.label_count: int = 0
self.output_file: str = ''
self.full_path: Path = None
self.pdf: Canvas = None
# width and height are in points (1/72 inch)
self.width: int = None
self.height: int = None
# database connection information
self.con = None
self.meta: MetaData = None
self.box: Table = None
# label locations on the page
        self.label_locations: List[LabelPosition] = []
self.compute_box_dimensions()
# set this to the last position in the list to force a new page
self.next_pos: int = len(self.label_locations)
# use the page number to control first page handling
self.page_number: int = 0
        if workdir is not None and workdir.is_dir():
self.working_dir = workdir
return
def run_QRPrt(self, parameters: dict):
"""
        Top method for running the QR code printer.
:param parameters: dictionary of command line arguments
:return:
"""
parm_dict = parameters
self.url_prefix: str = parm_dict['--prefix'].strip('\'"')
self.box_start: int = int(parm_dict['--start'])
self.label_count: int = int(parm_dict['--count'])
self.output_file: str = parm_dict['--output']
if (not isinstance(self.box_start, int)) or \
self.box_start <= 0:
raise ValueError('Box start must be a positive integer')
if (not isinstance(self.label_count, int)) or \
self.label_count <= 0:
raise ValueError('Label count must be a positive integer')
full_path = self.working_dir / self.output_file
if full_path.exists():
raise ValueError('File already exists')
else:
self.full_path = full_path
debug(
f'Parameters validated: pfx: {self.url_prefix}, '
f'start: {self.box_start}, '
f'count: {self.label_count}, '
f'file: {self.output_file}'
)
self.connect_to_generate_labels()
return
def connect_to_generate_labels(self):
"""
Connect to the database and generate labels.
:return:
"""
# establish access to the database
self.con, self.meta = self.connect(
user=settings_private.DB_USER,
password=settings_private.DB_PSWD,
db=settings_private.DB_NAME,
host=settings_private.DB_HOST,
port=settings_private.DB_PORT
)
# establish access to the box table
self.box = Table(
'fpiweb_box',
self.meta,
autoload=True,
autoload_with=self.con)
self.generate_label_pdf()
# self.con.close()
return
def connect(self, user, password, db, host='localhost', port=5432):
"""
Establish a connection to the desired PostgreSQL database.
:param user:
:param password:
:param db:
:param host:
:param port:
:return:
"""
# We connect with the help of the PostgreSQL URL
# postgresql://federer:grandestslam@localhost:5432/tennis
url = f'postgresql://{user}:{password}@{host}:{port}/{db}'
# The return value of create_engine() is our connection object
con = create_engine(url, client_encoding='utf8')
# We then bind the connection to MetaData()
meta = MetaData(bind=con)
return con, meta
def generate_label_pdf(self):
"""
Generate the pdf file with the requested labels in it.
:return:
"""
self.initialize_pdf_file()
self.fill_pdf_pages()
self.finalize_pdf_file()
return
def initialize_pdf_file(self):
"""
Setup the pdf to receive labels.
:return:
"""
self.pdf = Canvas(str(self.full_path), pagesize=letter)
self.width, self.height = letter
return
def compute_box_dimensions(self):
"""
Compute the dimensions and bounding boxes for each label on the page.
:return:
"""
vertical_start = (BACKGROUND_SIZE.y * 3) + PAGE_OFFSET.y
horizontal_stop = (BACKGROUND_SIZE.x * 3) + PAGE_OFFSET.x - 1
for vertical_position in range(vertical_start, -1,
-BACKGROUND_SIZE.y):
for horizontal_position in range(PAGE_OFFSET.x,
horizontal_stop,
BACKGROUND_SIZE.x):
new_label = LabelPosition(Point(horizontal_position,
vertical_position))
self.label_locations.append(new_label)
return
def fill_pdf_pages(self):
"""
Fill one or more pages with labels.
:return:
"""
# # draw lines around the boxes that will be filled with labels
# self.draw_boxes_on_page()
# # self.pdf.setFillColorRGB(1, 0, 1)
# # self.pdf.rect(2*inch, 2*inch, 2*inch, 2*inch, fill=1)
for label_file, label_name in self.get_next_qr_img():
debug(f'Got {label_file}')
if self.next_pos >= len(self.label_locations) - 1:
                self.finish_page()
self.next_pos = 0
else:
self.next_pos += 1
self.draw_bounding_box(self.next_pos)
self.place_label(label_file, label_name, self.next_pos)
self.finish_page()
return
def place_label(self, file_name: str, label_name: str, pos: int):
"""
Place the label in the appropriate location on the page.
:param file_name:
:param label_name:
:param pos:
:return:
"""
box_info = self.label_locations[pos]
# place image on page
im = Image(file_name, LABEL_SIZE.x, LABEL_SIZE.y)
im.drawOn(self.pdf, box_info.image_start.x, box_info.image_start.y)
# place title above image
self.pdf.setFont('Helvetica-Bold', 12)
self.pdf.drawCentredString(
box_info.title_start.x + TITLE_ADJUSTMENT.x,
box_info.title_start.y + TITLE_ADJUSTMENT.y,
label_name
)
return
def finish_page(self):
"""
        Finish off the previous page before starting a new one.
"""
if self.page_number > 0:
self.pdf.showPage()
self.page_number += 1
return
def draw_bounding_box(self, label_pos: int):
"""
Draw a bounding box around the specified label.
:param label_pos: position in the labels locations list.
:return:
"""
box_info = self.label_locations[label_pos]
self.pdf.line(box_info.upper_left_offset.x,
box_info.upper_left_offset.y,
box_info.upper_right_offset.x,
box_info.upper_right_offset.y)
self.pdf.line(box_info.upper_right_offset.x,
box_info.upper_right_offset.y,
box_info.lower_right_offset.x,
box_info.lower_right_offset.y)
self.pdf.line(box_info.lower_right_offset.x,
box_info.lower_right_offset.y,
box_info.lower_left_offset.x,
box_info.lower_left_offset.y)
self.pdf.line(box_info.lower_left_offset.x,
box_info.lower_left_offset.y,
box_info.upper_left_offset.x,
box_info.upper_left_offset.y)
return
def get_next_qr_img(self) -> (str, str):
"""
Build the QR image for the next box label.
:return: a QR code image ready to print
"""
for url, label in self.get_next_box_url():
label_file_name = f'{label}.png'
qr = pyqrcode.create(url)
qr.png(label_file_name, scale=5)
yield label_file_name, label
return
def get_next_box_url(self) -> (str, str):
"""
Build the URL for the next box.
:return:
"""
for label, box_number in self.get_next_box_number():
debug(f'Got {label}, {box_number}')
url = f"{self.url_prefix}{box_number:05}"
yield url, label
return
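    # Illustrative example (prefix and box number are hypothetical): with a
    # url_prefix of 'http://example.org/box/' and box number 17, the yielded
    # URL is 'http://example.org/box/00017' for label 'BOX00017'.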
def get_next_box_number(self) -> (str, int):
"""
Search for the next box number to go on a label.
:return:
"""
next_box_number = self.box_start
available_count = 0
while available_count < self.label_count:
box_label = f'BOX{next_box_number:05}'
debug(f'Attempting to get {box_label}')
sel_box_stm = select([self.box]).where(
self.box.c.box_number == box_label)
# box_found = exists(sel_box_stm)
# exist_stm = exists().where(self.box.c.box_number == box_label)
result = self.con.execute(sel_box_stm)
debug(f'Search result: {result.rowcount}')
box = result.fetchone()
if not box:
# found a hole in the numbers
available_count += 1
debug(f'{box_label} not found - using for label')
yield (box_label, next_box_number)
else:
result.close()
            next_box_number += 1
return
def finalize_pdf_file(self):
"""
All pages have been generated so flush all buffers and close.
:return:
"""
self.pdf.save()
return
class Main:
"""
Main class to start things rolling.
"""
def __init__(self):
"""
Get things started.
"""
self.QRCodePtr: QRCodePrinterClass = None
self.working_dir: Path = None
return
def run_QRCodePtr(self, arguments: dict):
"""
        Prepare to run the QR code printer.
:return:
"""
self.QRCodePtr = QRCodePrinterClass(workdir=self.working_dir)
debug('Starting up QRCodePtr')
self.QRCodePtr.run_QRPrt(arguments)
return
def start_logging(self, work_dir: Path, debug_name: str):
"""
Establish the logging for all the other scripts.
:param work_dir:
:param debug_name:
:return: (nothing)
"""
# Set flag that no logging has been established
logging_started = False
# find our working directory and possible logging input file
_workdir = work_dir
_logfilename = debug_name
# obtain the full path to the log information
_debugConfig = _workdir / _logfilename
# verify that the file exists before trying to open it
if Path.exists(_debugConfig):
try:
# get the logging params from yaml file and instantiate a log
with open(_logfilename, 'r') as _logdictfd:
_logdict = yaml.load(_logdictfd, Loader=yaml.SafeLoader)
logging.config.dictConfig(_logdict)
logging_started = True
except Exception as xcp:
print(f'The file {_debugConfig} exists, but does not contain '
f'appropriate logging directives.')
raise ValueError('Invalid logging directives.')
else:
print(f'Logging directives file {_debugConfig} either not '
f'specified or not found')
if not logging_started:
# set up minimal logging
_logfilename = 'debuginfo.txt'
_debugConfig = _workdir / _logfilename
logging.basicConfig(filename='debuginfo.txt', level=logging.INFO,
filemode='w')
print(f'Minimal logging established to {_debugConfig}')
# start logging
global log
log = logging.getLogger(__name__)
logging.info(f'Logging started: working directory is {_workdir}')
        # set confirmed working directory to pass on to target class
self.working_dir = _workdir
return
if __name__ == "__main__":
arguments = docopt(__doc__, version='QRCodePrinter 1.0')
workdir = Path.cwd()
debug_file_name = 'debug_info.yaml'
main = Main()
main.start_logging(workdir, debug_file_name)
debug('Parameters as interpreted by docopt')
for arg in arguments:
debug(f'arg key: {arg}, value: {arguments[arg]}')
main.run_QRCodePtr(arguments)
# EOF
|
from .uuid64 import *
|
import json
import re
from accessstats.client import ThriftClient
REGEX_ISSN = re.compile("^[0-9]{4}-[0-9]{3}[0-9xX]$")
REGEX_ISSUE = re.compile("^[0-9]{4}-[0-9]{3}[0-9xX][0-2][0-9]{3}[0-9]{4}$")
REGEX_ARTICLE = re.compile("^S[0-9]{4}-[0-9]{3}[0-9xX][0-2][0-9]{3}[0-9]{4}[0-9]{5}$")
def _code_type(code):
if not code:
return None
if REGEX_ISSN.match(code):
return 'issn'
if REGEX_ISSUE.match(code):
return 'issue'
if REGEX_ARTICLE.match(code):
return 'pid'
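# Illustrative inputs for _code_type (hypothetical identifiers chosen to match
# the regexes above):
#   _code_type('0034-8910')                -> 'issn'
#   _code_type('0034-891020130004')        -> 'issue'
#   _code_type('S0034-89102013000400007')  -> 'pid'
#   _code_type('not-a-code')               -> None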
def _compute_downloads_per_year(query_result):
result = []
for item in query_result['aggregations']['access_year']['buckets']:
result.append(
(item['key'], int(item['access_total']['value']))
)
return result
def downloads_per_year(collection, code, raw=False):
"""
    This method retrieves the total number of downloads per year.
arguments
collection: SciELO 3 letters Acronym
code: (Journal ISSN, Issue PID, Article PID)
return
[
("2017", "20101"),
("2016", "11201"),
("2015", "12311"),
...
]
"""
tc = ThriftClient()
body = {"query": {"filtered": {}}}
fltr = {}
query = {
"query": {
"bool": {
"must": [
{
"match": {
"collection": collection
}
}
]
}
}
}
aggs = {
"aggs": {
"access_year": {
"terms": {
"field": "access_year",
"size": 0,
"order": {
"_term": "asc"
}
},
"aggs": {
"access_total": {
"sum": {
"field": "access_total"
}
}
}
}
}
}
body['query']['filtered'].update(fltr)
body['query']['filtered'].update(query)
body.update(aggs)
code_type = _code_type(code)
if code_type:
query["query"]["bool"]["must"].append({
"match": {
code_type: code
}
})
query_parameters = [
('size', '0')
]
query_result = tc.search(json.dumps(body), query_parameters)
return query_result if raw is True else _compute_downloads_per_year(query_result)
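# Hedged usage sketch (the collection acronym and ISSN below are illustrative):
#   downloads_per_year('scl', '0034-8910')
#   # -> list of (year, total_downloads) tuples, ascending by year
#   downloads_per_year('scl', '0034-8910', raw=True)
#   # -> the unprocessed Elasticsearch aggregation response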
|
import json
import jsonschema
import typer
from jsonschema import validate
app = typer.Typer()
def get_schema(schema_location):
with open(schema_location, 'r') as schema_file:
schema = json.load(schema_file)
return schema
def get_data(json_data_location):
with open(json_data_location, 'r') as data_file:
schema = json.load(data_file)
return schema
@app.command()
def validate_json(json_data_location, schema_location):
json_schema = get_schema(schema_location)
json_data = get_data(json_data_location)
try:
validate(instance=json_data, schema=json_schema)
except jsonschema.exceptions.ValidationError as err:
print(err)
err = "Invalid JSON"
return False, err
message = "Valid JSON"
print(message)
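# Hedged usage sketch (file names are placeholders): the command can be called
# directly or through the Typer app registered above, e.g.
#   validate_json('data.json', 'schema.json')   # prints "Valid JSON" on success
#   # via a console entry point wired to run_app(): <entry-point> data.json schema.json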
def run_app():
app() |
# ========================================================= #
import requests
import httpx
from typing import Union
from requests.models import Response
from httpx import Response as HttpxResponse
from enum import Enum
# ========================================================= #
class BaseClient:
"""
These docs are not meant for general users. These are library API references. The actual docs will be
available on the index page when they are prepared.
This is the **base client class** for all other REST clients which inherit from this class and implement their own
endpoints on top of it.
"""
def __init__(self, api_key: str, connect_timeout: int = 10, read_timeout: int = 10):
"""
Initiates a Client to be used to access all the endpoints.
:param api_key: Your API Key. Visit your dashboard to get yours.
:param connect_timeout: The connection timeout in seconds. Defaults to 10. basically the number of seconds to
wait for a connection to be established. Raises a ``ConnectTimeout`` if unable to
connect within specified time limit.
:param read_timeout: The read timeout in seconds. Defaults to 10. basically the number of seconds to wait for
                             data to be received. Raises a ``ReadTimeout`` if unable to connect within the specified
time limit.
"""
self.KEY = api_key
self.BASE = 'https://api.polygon.io'
self.time_out_conf = (connect_timeout, read_timeout)
self.session = requests.session()
self.session.headers.update({'Authorization': f'Bearer {self.KEY}'})
# Context Managers
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.session.close()
def close(self):
"""
Closes the ``requests.Session`` and frees up resources. It is recommended to call this method in your
exit handlers
"""
self.session.close()
# Internal Functions
def _get_response(self, path: str, params: dict = None,
raw_response: bool = True) -> Union[Response, dict]:
"""
Get response on a path. Meant to be used internally but can be used if you know what you're doing
:param path: RESTful path for the endpoint. Available on the docs for the endpoint right above its name.
:param params: Query Parameters to be supplied with the request. These are mapped 1:1 with the endpoint.
:param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to check the
status code or inspect the headers. Defaults to True which returns the ``Response`` object.
:return: A Response object by default. Make ``raw_response=False`` to get JSON decoded Dictionary
"""
_res = self.session.request('GET', self.BASE + path, params=params, timeout=self.time_out_conf)
if raw_response:
return _res
return _res.json()
def get_next_page_by_url(self, url: str, raw_response: bool = False) -> Union[Response, dict]:
"""
        Get the next page of a response. The URL is returned within the ``next_url`` attribute on endpoints which support
pagination (eg the tickers endpoint). If the response doesn't contain this attribute, either all pages were
received or the endpoint doesn't have pagination. Meant for internal use primarily.
:param url: The next URL. As contained in ``next_url`` of the response.
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: Either a Dictionary or a Response object depending on value of raw_response. Defaults to Dict.
"""
_res = self.session.request('GET', url)
if raw_response:
return _res
return _res.json()
def get_next_page(self, old_response: Union[Response, dict],
raw_response: bool = False) -> Union[Response, dict, bool]:
"""
Get the next page using the most recent old response. This function simply parses the next_url attribute
from the existing response and uses it to get the next page. Returns False if there is no next page
remaining (which implies that you have reached the end of all pages or the endpoint doesn't support pagination).
:param old_response: The most recent existing response. Can be either ``Response`` Object or Dictionaries
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: A JSON decoded Dictionary by default. Make ``raw_response=True`` to get underlying response object
"""
try:
if not isinstance(old_response, dict):
old_response = old_response.json()
_next_url = old_response['next_url']
return self.get_next_page_by_url(_next_url, raw_response=raw_response)
except KeyError:
return False
def get_previous_page(self, old_response: Union[Response, dict],
raw_response: bool = False) -> Union[Response, dict, bool]:
"""
Get the previous page using the most recent old response. This function simply parses the previous_url attribute
from the existing response and uses it to get the previous page. Returns False if there is no previous page
remaining (which implies that you have reached the start of all pages or the endpoint doesn't support
pagination).
:param old_response: The most recent existing response. Can be either ``Response`` Object or Dictionaries
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: A JSON decoded Dictionary by default. Make ``raw_response=True`` to get underlying response object
"""
try:
if not isinstance(old_response, dict):
old_response = old_response.json()
            _prev_url = old_response['previous_url']
            return self.get_next_page_by_url(_prev_url, raw_response=raw_response)
except KeyError:
return False
@staticmethod
def _change_enum(val: Union[str, Enum, float, int], allowed_type=str):
if isinstance(val, Enum):
try:
return val.value
except AttributeError:
raise ValueError(f'The value supplied: ({val}) does not match the required type: ({allowed_type}). '
f'Please consider using the specified enum in the docs for this function or recheck '
f'the value supplied.')
if isinstance(allowed_type, list):
if type(val) in allowed_type:
return val
raise ValueError(f'The value supplied: ({val}) does not match the required type: ({allowed_type}). '
f'Please consider using the specified enum in the docs for this function or recheck '
f'the value supplied.')
if isinstance(val, allowed_type) or val is None:
return val
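# Hedged usage sketch for pagination (the API key, path and params below are
# placeholders, not endpoints defined by this base class):
#   with BaseClient(api_key='YOUR_KEY') as client:
#       page = client._get_response('/some/listing/path',
#                                   params={'limit': 100}, raw_response=False)
#       while page:
#           ...                                # consume the current page
#           page = client.get_next_page(page)  # False once there is no next_url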
# ========================================================= #
class BaseAsyncClient:
"""
These docs are not meant for general users. These are library API references. The actual docs will be
available on the index page when they are prepared.
This is the **base async client class** for all other REST clients which inherit from this class and implement
their own endpoints on top of it.
"""
def __init__(self, api_key: str, connect_timeout: int = 10, read_timeout: int = 10):
"""
Initiates a Client to be used to access all the endpoints.
:param api_key: Your API Key. Visit your dashboard to get yours.
:param connect_timeout: The connection timeout in seconds. Defaults to 10. basically the number of seconds to
wait for a connection to be established. Raises a ``ConnectTimeout`` if unable to
connect within specified time limit.
:param read_timeout: The read timeout in seconds. Defaults to 10. basically the number of seconds to wait for
                             data to be received. Raises a ``ReadTimeout`` if unable to connect within the specified
time limit.
"""
self.KEY = api_key
self.BASE = 'https://api.polygon.io'
self.time_out_conf = httpx.Timeout(connect=connect_timeout, read=read_timeout, pool=10,
write=10)
self.session = httpx.AsyncClient(timeout=self.time_out_conf)
self.session.headers.update({'Authorization': f'Bearer {self.KEY}'})
# Context Managers
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.session.aclose()
async def close(self):
"""
Closes the ``httpx.AsyncClient`` and frees up resources. It is recommended to call this method in your
exit handlers. This method should be awaited as this is a coroutine.
"""
await self.session.aclose()
# Internal Functions
async def _get_response(self, path: str, params: dict = None,
raw_response: bool = True) -> Union[HttpxResponse, dict]:
"""
Get response on a path - meant to be used internally but can be used if you know what you're doing
:param path: RESTful path for the endpoint. Available on the docs for the endpoint right above its name.
:param params: Query Parameters to be supplied with the request. These are mapped 1:1 with the endpoint.
:param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to check the
status code or inspect the headers. Defaults to True which returns the ``Response`` object.
:return: A Response object by default. Make ``raw_response=False`` to get JSON decoded Dictionary
"""
_res = await self.session.request('GET', self.BASE + path, params=params)
if raw_response:
return _res
return _res.json()
async def get_next_page_by_url(self, url: str, raw_response: bool = False) -> Union[HttpxResponse, dict]:
"""
        Get the next page of a response. The URL is returned within the ``next_url`` attribute on endpoints which
        support pagination (e.g. the tickers endpoint). If the response doesn't contain this attribute, either all
        pages were received or the endpoint doesn't have pagination. Meant for internal use primarily.
:param url: The next URL. As contained in ``next_url`` of the response.
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: Either a Dictionary or a Response object depending on value of raw_response. Defaults to Dict.
"""
_res = await self.session.request('GET', url)
if raw_response:
return _res
return _res.json()
async def get_next_page(self, old_response: Union[HttpxResponse, dict],
raw_response: bool = False) -> Union[HttpxResponse, dict, bool]:
"""
Get the next page using the most recent old response. This function simply parses the next_url attribute
from the existing response and uses it to get the next page. Returns False if there is no next page
remaining (which implies that you have reached the end of all pages or the endpoint doesn't support
pagination) - Async method
:param old_response: The most recent existing response. Can be either ``Response`` Object or Dictionaries
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: A JSON decoded Dictionary by default. Make ``raw_response=True`` to get underlying response object
"""
try:
if not isinstance(old_response, dict):
old_response = old_response.json()
_next_url = old_response['next_url']
return await self.get_next_page_by_url(_next_url, raw_response=raw_response)
except KeyError:
return False
async def get_previous_page(self, old_response: Union[HttpxResponse, dict],
raw_response: bool = False) -> Union[HttpxResponse, dict, bool]:
"""
Get the previous page using the most recent old response. This function simply parses the previous_url attribute
from the existing response and uses it to get the previous page. Returns False if there is no previous page
remaining (which implies that you have reached the start of all pages or the endpoint doesn't support
pagination) - Async method
:param old_response: The most recent existing response. Can be either ``Response`` Object or Dictionaries
        :param raw_response: Whether or not to return the ``Response`` Object. Useful for when you need to, say, check the
status code or inspect the headers. Defaults to False which returns the json decoded
dictionary.
:return: A JSON decoded Dictionary by default. Make ``raw_response=True`` to get underlying response object
"""
try:
if not isinstance(old_response, dict):
old_response = old_response.json()
            _prev_url = old_response['previous_url']
            return await self.get_next_page_by_url(_prev_url, raw_response=raw_response)
except KeyError:
return False
@staticmethod
def _change_enum(val: Union[str, Enum, float, int], allowed_type=str):
if isinstance(val, Enum):
try:
return val.value
except AttributeError:
raise ValueError(f'The value supplied: ({val}) does not match the required type: ({allowed_type}). '
f'Please consider using the specified enum in the docs for this function or recheck '
f'the value supplied.')
if isinstance(allowed_type, list):
if type(val) in allowed_type:
return val
raise ValueError(f'The value supplied: ({val}) does not match the required type: ({allowed_type}). '
f'Please consider using the specified enum in the docs for this function or recheck '
f'the value supplied.')
if isinstance(val, allowed_type) or val is None:
return val
# ========================================================= #
if __name__ == '__main__': # Tests
    print('Don\'t You Dare Run Lib Files Directly')
# ========================================================= #
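# A minimal, hypothetical usage sketch for the async pagination helpers above.
# The endpoint path '/v3/reference/tickers' and the query parameters are
# assumptions for illustration; this function is defined here but never called.
async def _example_paginate_all(api_key: str, path: str = '/v3/reference/tickers'):
    client = BaseAsyncClient(api_key)
    try:
        pages = []
        # first page as a JSON-decoded dict
        response = await client._get_response(path, params={'limit': 100}, raw_response=False)
        while response:
            pages.append(response)
            # get_next_page returns False once there is no 'next_url' left
            response = await client.get_next_page(response)
        return pages
    finally:
        await client.close()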
|
# Generated by Django 2.0.9 on 2018-12-21 01:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('registers', '0012_auto_20181219_1210'),
]
operations = [
migrations.AddField(
model_name='itsystem',
name='database_server',
field=models.TextField(blank=True, help_text="Database server(s) that host this system's data"),
),
migrations.AlterField(
model_name='changerequest',
name='approver',
field=models.ForeignKey(blank=True, help_text='The person who will endorse this change prior to CAB', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='approver', to='organisation.DepartmentUser'),
),
migrations.AlterField(
model_name='changerequest',
name='requester',
field=models.ForeignKey(blank=True, help_text='The person who is requesting this change', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='requester', to='organisation.DepartmentUser'),
),
migrations.AlterField(
model_name='itsystem',
name='availability',
field=models.PositiveIntegerField(blank=True, choices=[(1, '24/7/365'), (2, 'Business hours')], help_text='Expected availability for this system', null=True),
),
migrations.AlterField(
model_name='itsystem',
name='biller_code',
field=models.CharField(blank=True, help_text='BPAY biller code for this system (must be unique).', max_length=64, null=True),
),
migrations.AlterField(
model_name='itsystem',
name='oim_internal_only',
field=models.BooleanField(default=False, help_text='For OIM use only', verbose_name='OIM internal only'),
),
migrations.AlterField(
model_name='itsystem',
name='user_groups',
field=models.ManyToManyField(blank=True, help_text='User group(s) that use this system', to='registers.UserGroup'),
),
migrations.AlterField(
model_name='itsystem',
name='user_notification',
field=models.EmailField(blank=True, help_text='Users (group email address) to be advised of any changes (outage or upgrade) to the system', max_length=254, null=True),
),
]
|
# Generated by Django 2.1.1 on 2019-04-02 00:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0006_private_project_report_snippet'),
]
operations = [
migrations.CreateModel(
name='Community_Private_Report_Snippet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=255)),
],
options={
'verbose_name': 'Community Private Report Snippet',
},
),
migrations.CreateModel(
name='Community_Public_Report_Snippet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=255)),
],
options={
'verbose_name': 'Community Public Report Snippet',
},
),
migrations.AlterField(
model_name='homepage',
name='hero_text',
field=models.CharField(help_text='Write an introduction for the Site', max_length=255),
),
]
|
import numpy as np
#import matplotlib as mpl
import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import Axes3D
from sklearn import datasets
from sklearn.decomposition import PCA
iris = datasets.load_iris(as_frame=True)
X = iris.data.values
y = iris.target.values
print(iris.frame.head(2))
X_reduced_2d = PCA(n_components=2).fit_transform(X)
# import matplotlib.gridspec as gridspec
fig = plt.figure(figsize = (20,15))
# ...
ax1 = fig.add_subplot(3,3,1, projection='3d')
ax1.set_title("3D iris, first three features")
ax1.scatter(X[:, 0], X[:, 1], X[:, 2], c=y, cmap=plt.cm.Set1, edgecolor='k', alpha=.2)
ax2 = fig.add_subplot(3,3,2, projection='3d')
ax2.set_title("3D iris, PCA 2D")
ax2.scatter(X_reduced_2d[:, 0], X_reduced_2d[:, 1], zs=0, c=y, cmap=plt.cm.Set1, edgecolor='k', alpha=.2)
ax3 = fig.add_subplot(3,3,3)
ax3.set_title("2D iris, PCA 2D")
ax3.scatter(X_reduced_2d[:, 0], X_reduced_2d[:, 1], c=y, cmap=plt.cm.Set1, edgecolor='k', alpha=.2)
plt.show()
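# Optional sanity check (an addition, not part of the original script): report how
# much variance the 2D projection retains, using sklearn's explained_variance_ratio_.
pca_2d = PCA(n_components=2).fit(X)
print("explained variance ratio:", pca_2d.explained_variance_ratio_,
      "total:", pca_2d.explained_variance_ratio_.sum())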
|
import os
import sys
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../..')
from andi_funcs import TrackGeneratorRegression, import_tracks, import_labels, package_tracks
from models import regression_model_2d
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import ModelCheckpoint
from tensorflow.keras.models import load_model
import numpy as np
# Load validation data
tracks_val = package_tracks(import_tracks('../../Datasets/Validation/task1.txt')[1], dimensions=2, max_T=1001)
exponents_val = import_labels('../../Datasets/Validation/ref1.txt')[1]
tracks_test = package_tracks(import_tracks('../../Datasets/Test/task1.txt')[1], dimensions=2, max_T=1001)
# Run model
model = regression_model_2d()
model.compile(optimizer=Adam(learning_rate=0.001), loss='mse', metrics=['mae'])
model.summary()
history = model.fit(TrackGeneratorRegression(batches=200, batch_size=32, dimensions=2, min_T=5, max_T=1001),
epochs=200,
callbacks=[
ModelCheckpoint(filepath='../Models/2D.h5', monitor='val_mae', save_best_only=True,
mode='min')],
validation_data=(tracks_val, exponents_val), use_multiprocessing=True, workers=16)
# Save performance metrics
np.savetxt('2D_mae.txt', history.history['mae'])
np.savetxt('2D_val_mae.txt', history.history['val_mae'])
# Evaluate on test data
model = load_model('../Models/2D.h5')
np.savetxt('../../Datasets/Test/predictions_task1_2D.txt', model.predict(tracks_test, use_multiprocessing=True))
|
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 Nathan Juraj Michlo
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
import logging
import os
from abc import ABCMeta
from typing import List, Tuple
import h5py
from disent.data.util.in_out import basename_from_url, download_file, ensure_dir_exists
from disent.data.util.state_space import StateSpace
log = logging.getLogger(__name__)
# ========================================================================= #
# ground truth data #
# ========================================================================= #
class GroundTruthData(StateSpace):
def __init__(self):
        assert len(self.factor_names) == len(self.factor_sizes), 'Dimensionality mismatch of factor_names and factor_sizes'
super().__init__(factor_sizes=self.factor_sizes)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
# Overrides #
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
@property
def factor_names(self) -> Tuple[str, ...]:
raise NotImplementedError()
@property
def factor_sizes(self) -> Tuple[int, ...]:
raise NotImplementedError()
@property
def observation_shape(self) -> Tuple[int, ...]:
# shape as would be for a non-batched observation
# eg. H x W x C
raise NotImplementedError()
@property
def x_shape(self) -> Tuple[int, ...]:
# shape as would be for a single observation in a torch batch
# eg. C x H x W
shape = self.observation_shape
return shape[-1], *shape[:-1]
def __getitem__(self, idx):
raise NotImplementedError
# ========================================================================= #
# dataset helpers #
# ========================================================================= #
class DownloadableGroundTruthData(GroundTruthData, metaclass=ABCMeta):
def __init__(self, data_dir='data/dataset', force_download=False):
super().__init__()
# paths
self._data_dir = ensure_dir_exists(data_dir)
self._data_paths = [os.path.join(self._data_dir, basename_from_url(url)) for url in self.dataset_urls]
# meta
self._force_download = force_download
# DOWNLOAD
self._do_download_dataset()
def _do_download_dataset(self):
for path, url in zip(self.dataset_paths, self.dataset_urls):
no_data = not os.path.exists(path)
# download data
if self._force_download or no_data:
download_file(url, path, overwrite_existing=True)
@property
def dataset_paths(self) -> List[str]:
"""path that the data should be loaded from in the child class"""
return self._data_paths
@property
def dataset_urls(self) -> List[str]:
raise NotImplementedError()
class PreprocessedDownloadableGroundTruthData(DownloadableGroundTruthData, metaclass=ABCMeta):
def __init__(self, data_dir='data/dataset', force_download=False, force_preprocess=False):
super().__init__(data_dir=data_dir, force_download=force_download)
# paths
self._proc_path = f'{self._data_path}.processed'
self._force_preprocess = force_preprocess
# PROCESS
self._do_download_and_process_dataset()
def _do_download_dataset(self):
# we skip this in favour of our new method,
# so that we can lazily download the data.
pass
def _do_download_and_process_dataset(self):
no_data = not os.path.exists(self._data_path)
no_proc = not os.path.exists(self._proc_path)
# preprocess only if required
do_proc = self._force_preprocess or no_proc
# lazily download if required for preprocessing
do_data = self._force_download or (no_data and do_proc)
if do_data:
download_file(self.dataset_url, self._data_path, overwrite_existing=True)
if do_proc:
# TODO: also used in io save file, convert to with syntax.
            # save to a temporary location in case there is an error, so we then know one occurred.
path_dir, path_base = os.path.split(self._proc_path)
ensure_dir_exists(path_dir)
temp_proc_path = os.path.join(path_dir, f'.{path_base}.temp')
# process stuff
self._preprocess_dataset(path_src=self._data_path, path_dst=temp_proc_path)
# delete existing file if needed
if os.path.isfile(self._proc_path):
os.remove(self._proc_path)
# move processed file to correct place
os.rename(temp_proc_path, self._proc_path)
assert os.path.exists(self._proc_path), f'Overridden _preprocess_dataset method did not initialise the required dataset file: dataset_path="{self._proc_path}"'
@property
def _data_path(self):
assert len(self.dataset_paths) == 1
return self.dataset_paths[0]
@property
def dataset_urls(self):
return [self.dataset_url]
@property
def dataset_url(self):
raise NotImplementedError()
@property
def dataset_path(self):
"""path that the dataset should be loaded from in the child class"""
return self._proc_path
@property
def dataset_path_unprocessed(self):
return self._data_path
def _preprocess_dataset(self, path_src, path_dst):
raise NotImplementedError()
class Hdf5PreprocessedGroundTruthData(PreprocessedDownloadableGroundTruthData, metaclass=ABCMeta):
"""
Automatically download and pre-process an hdf5 dataset
    into the specified chunk sizes.
    Often the processed (re-chunked) dataset will be optimized for random access,
    while the unprocessed dataset will be better suited to sequential reads.
- The chunk size specifies the region of data to be loaded when accessing a
single element of the dataset, if the chunk size is not correctly set,
unneeded data will be loaded when accessing observations.
- override `hdf5_chunk_size` to set the chunk size, for random access
optimized data this should be set to the minimum observation shape that can
be broadcast across the shape of the dataset. Eg. with observations of shape
(64, 64, 3), set the chunk size to (1, 64, 64, 3).
TODO: Only supports one dataset from the hdf5 file
itself, labels etc need a custom implementation.
"""
def __init__(self, data_dir='data/dataset', in_memory=False, force_download=False, force_preprocess=False):
super().__init__(data_dir=data_dir, force_download=force_download, force_preprocess=force_preprocess)
self._in_memory = in_memory
# Load the entire dataset into memory if required
if self._in_memory:
with h5py.File(self.dataset_path, 'r', libver='latest', swmr=True) as db:
# indexing dataset objects returns numpy array
# instantiating np.array from the dataset requires double memory.
self._memory_data = db[self.hdf5_name][:]
else:
# is this thread safe?
self._hdf5_file = h5py.File(self.dataset_path, 'r', libver='latest', swmr=True)
self._hdf5_data = self._hdf5_file[self.hdf5_name]
def __getitem__(self, idx):
if self._in_memory:
return self._memory_data[idx]
else:
return self._hdf5_data[idx]
def __del__(self):
# do we need to do this?
if not self._in_memory:
self._hdf5_file.close()
def _preprocess_dataset(self, path_src, path_dst):
import os
from disent.data.util.hdf5 import hdf5_resave_dataset, hdf5_test_entries_per_second, bytes_to_human
# resave datasets
with h5py.File(path_src, 'r') as inp_data:
with h5py.File(path_dst, 'w') as out_data:
hdf5_resave_dataset(inp_data, out_data, self.hdf5_name, self.hdf5_chunk_size, self.hdf5_compression, self.hdf5_compression_lvl)
# File Size:
log.info(f'[FILE SIZES] IN: {bytes_to_human(os.path.getsize(path_src))} OUT: {bytes_to_human(os.path.getsize(path_dst))}\n')
# Test Speed:
log.info('[TESTING] Access Speed...')
log.info(f'Random Accesses Per Second: {hdf5_test_entries_per_second(out_data, self.hdf5_name, access_method="random"):.3f}')
@property
def hdf5_compression(self) -> 'str':
return 'gzip'
@property
def hdf5_compression_lvl(self) -> int:
        # default is 4; the max of 9 doesn't seem to add much CPU usage on read, but it's not worth it data-wise?
return 4
@property
def hdf5_name(self) -> str:
raise NotImplementedError()
@property
def hdf5_chunk_size(self) -> Tuple[int]:
# dramatically affects access speed, but also compression ratio.
raise NotImplementedError()
# ========================================================================= #
# END #
# ========================================================================= #
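# ========================================================================= #
# example (added, hypothetical)                                             #
# ========================================================================= #

# A minimal sketch of a GroundTruthData subclass, purely illustrative and not part
# of disent: two factors, observations generated on the fly. The idx_to_pos helper
# is assumed to be provided by StateSpace; the sketch is left commented out.
#
# import numpy as np
#
# class XYToyData(GroundTruthData):
#     @property
#     def factor_names(self): return ('x', 'y')
#     @property
#     def factor_sizes(self): return (8, 8)
#     @property
#     def observation_shape(self): return (8, 8, 1)
#     def __getitem__(self, idx):
#         x, y = self.idx_to_pos(idx)  # assumed index -> factor positions
#         obs = np.zeros(self.observation_shape, dtype=np.float32)
#         obs[y, x, 0] = 1.0
#         return obs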
|
from utils import *
from functools import lru_cache
def parse_nums(s):
return [int(i) for i in s.strip().split(',')]
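# Added explanatory note: each input value is a countdown timer. dfs(v, d) counts
# how many fish one fish with timer v becomes after d days: while the timer is
# nonzero it just ticks down (dfs(v-1, d-1)); when it reaches zero it resets to 6
# and spawns a new fish with timer 8, hence dfs(6, d-1) + dfs(8, d-1).
# lru_cache makes part2's 256-day horizon tractable.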
def part1(values: List[int], DAYS=80) -> int:
@lru_cache
def dfs(v, d):
if d == 0: return 1
return dfs(v-1, d-1) if v else dfs(6, d-1) + dfs(8, d-1)
return sum(dfs(v, DAYS) for v in values)
def part2(values: List[int]) -> int:
return part1(values, 256)
if __name__ == "__main__":
values = data(6, parse_nums, sep='__none__')[0]
print(f'part1: {part1(values[:])}')
print(f'part2: {part2(values[:])}')
|
from aiogram.types.inline_keyboard import InlineKeyboardMarkup, InlineKeyboardButton
send_to_friend = 'Register for CyberAlleycat from ПЕТУШКИ_СЛАБАЧКИ'
read_the_rules = InlineKeyboardMarkup(row_width=2,
                                      inline_keyboard=[
                                          [InlineKeyboardButton('Race rules ℹ️', callback_data='rules')]
                                      ])
apply_registration = InlineKeyboardMarkup(row_width=2,
                                          inline_keyboard=[
                                              [InlineKeyboardButton('Registration ✔️', callback_data='start_reg'),
                                               InlineKeyboardButton('Invite a friend to the race',
                                                                    switch_inline_query=send_to_friend)]
                                          ])
bicycle_type = InlineKeyboardMarkup(row_width=2,
                                    inline_keyboard=[
                                        [InlineKeyboardButton('Fixed gear 🚲', callback_data='fixie'),
                                         InlineKeyboardButton('Multi-speed, singlespeed 🚴', callback_data='multispeed')]
                                    ])
gender = InlineKeyboardMarkup(row_width=2,
                              inline_keyboard=[
                                  [InlineKeyboardButton('Male 🙎♂️', callback_data='male'),
                                   InlineKeyboardButton('Female 🙎♀️', callback_data='female')],
                                  [InlineKeyboardButton('Have not chosen yet 🏳️🌈 (out of competition)', callback_data='trap')]
                              ])
check_reg_answer = InlineKeyboardMarkup(row_width=2,
                                        inline_keyboard=[
                                            [InlineKeyboardButton(text='Everything is correct', callback_data='data_ok'),
                                             InlineKeyboardButton(text='Edit my details',
                                                                  callback_data='data_not_ok')]
                                        ])
change_gender_or_bicycle = InlineKeyboardMarkup(row_width=2,
                                                inline_keyboard=[
                                                    [InlineKeyboardButton('Gender', callback_data='gender'),
                                                     InlineKeyboardButton('Bicycle type',
                                                                          callback_data='bicycle')]
                                                ])
are_you_ready = InlineKeyboardMarkup(row_width=2,
                                     inline_keyboard=[
                                         [InlineKeyboardButton('Ready, of course', callback_data='ready')]
                                     ])
got_the_point = InlineKeyboardMarkup(row_width=2,
                                     inline_keyboard=[
                                         [InlineKeyboardButton('I am at the checkpoint', callback_data='got_the_point')]
                                     ])
change_reg_data = InlineKeyboardMarkup(row_width=2,
                                       inline_keyboard=[
                                           [InlineKeyboardButton('Invite a friend to the race',
                                                                 switch_inline_query=send_to_friend)
                                            ],
                                           [InlineKeyboardButton('Waiting for the start 🏁', callback_data='data_ok'),
                                            InlineKeyboardButton('Edit my details ♲',
                                                                 callback_data='data_not_ok'),
                                            ],
                                       ])
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-09-26 20:30
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('bioshareX', '0009_message'),
]
operations = [
migrations.AddField(
model_name='message',
name='viewed_by',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
),
]
|
#Warmup-1 > monkey_trouble
def monkey_trouble(a_smile, b_smile):
if a_smile and b_smile:
return True
if not a_smile and not b_smile:
return True
else:
return False |
#art belongs to trinket.io - https://trinket.io/python/02c914e46c
man = (
"""
------
| |
| 0
| /-+-/
| |
| |
| | |
| | |
----------
""",
"""
------
| |
| 0
| /-+-/
| |
| |
| |
| |
----------
""",
"""
------
| |
| 0
| /-+-/
| |
| |
|
|
----------
""",
"""
------
| |
| 0
| /-+-/
| |
|
|
|
----------
""",
"""
------
| |
| 0
| /-+-/
|
|
|
|
----------
""",
"""
------
| |
| 0
| /-+-
|
|
|
|
----------
""",
"""
------
| |
| 0
| -+-
|
|
|
|
----------
""",
"""
------
| |
| 0
| -+
|
|
|
|
----------
""",
"""
------
| |
| 0
| +
|
|
|
|
----------
""",
"""
------
| |
| 0
|
|
|
|
|
----------
""",
"""
------
| |
|
|
|
|
|
|
----------
"""
) |
from rest_framework import routers
from .views import employeeCreateViewSet
router = routers.SimpleRouter()
router.register(r'employee', employeeCreateViewSet)
urlpatterns = router.urls |
import flopy.mt3d as mt
class SsmAdapter:
_data = None
def __init__(self, data):
self._data = data
def validate(self):
# should be implemented
# for key in content:
# do something
# return some hints
pass
def is_valid(self):
# should be implemented
# for key in content:
# do something
# return true or false
return True
def merge(self):
default = self.default()
for key in self._data:
if key == 'stress_period_data':
default[key] = self.to_dict(self._data[key])
continue
default[key] = self._data[key]
return default
def to_dict(self, data):
if type(data) == list:
spd_dict = {}
for stress_period, record in enumerate(data):
spd_dict[stress_period] = record
return spd_dict
return data
def get_package(self, _mt):
content = self.merge()
return mt.Mt3dSsm(
_mt,
**content
)
@staticmethod
def default():
default = {
"crch": None,
"cevt": None,
"mxss": None,
"stress_period_data": None,
"dtype": None,
"extension": 'ssm',
"unitnumber": None,
"filenames": None
}
return default
@staticmethod
def read_package(package):
content = {
"crch": package.crch, # None
"cevt": package.cevt, # None
"mxss": package.mxss,
"stress_period_data": {k: [list(i) for i in v] for k, v in package.stress_period_data.data.items()},
# "dtype": package.dtype,
"extension": package.extension[0],
"unitnumber": package.unit_number[0],
# "filenames": package.filenames
}
return content
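# A small, hypothetical illustration (not part of the adapter) of what merge()
# produces: a list-valued stress_period_data is re-keyed by stress period index,
# and every other supplied key overrides the corresponding default.
# SsmAdapter({'mxss': 5, 'stress_period_data': [[(0, 0, 0, 1.0, 1)], [(0, 0, 1, 2.0, 1)]]}).merge()
# -> {'crch': None, 'cevt': None, 'mxss': 5,
#     'stress_period_data': {0: [(0, 0, 0, 1.0, 1)], 1: [(0, 0, 1, 2.0, 1)]},
#     'dtype': None, 'extension': 'ssm', 'unitnumber': None, 'filenames': None}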
|
# Generated by Django 2.1.7 on 2019-05-06 10:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dailies', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='honkaiimpactdaily',
options={'verbose_name': 'Honkai Impact Daily', 'verbose_name_plural': 'Honkai Impact Dailies'},
),
migrations.AlterField(
model_name='honkaiimpactdaily',
name='emoji',
field=models.CharField(help_text='Must be a Unicode character', max_length=2),
),
]
|
# Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
import json
import logging
import os
import subprocess
import sys
from sawtooth.manage.node import NodeController
from sawtooth.manage.utils import get_executable_script
from sawtooth.validator_config import get_validator_configuration
LOGGER = logging.getLogger(__name__)
class SubprocessNodeController(NodeController):
def __init__(self, host_name='localhost', verbose=False):
self._host_name = host_name
self._verbose = verbose
self._base_config = get_validator_configuration([], {})
# Additional configuration for the genesis validator
self._genesis_cfg = {
"InitialConnectivity": 0,
}
# Additional configuration for the non-genesis validators
self._non_genesis_cfg = {
"InitialConnectivity": 1,
}
self._nodes = {}
def _construct_start_command(self, node_args):
host = self._host_name
node_name = node_args.node_name
http_port = node_args.http_port
gossip_port = node_args.gossip_port
cmd = get_executable_script('txnvalidator')
if self._verbose is True:
cmd += ['-vv']
cmd += ['--node', node_name]
cmd += ['--listen', "0.0.0.0:{}/TCP http".format(http_port)]
cmd += ['--listen', "{}:{}/UDP gossip".format(host, gossip_port)]
for x in node_args.config_files:
cmd += ['--config', x]
# Create and indicate special config file
config_dir = self._base_config['ConfigDirectory']
if node_args.currency_home is not None:
config_dir = os.path.join(node_args.currency_home, 'etc')
if not os.path.exists(config_dir):
os.makedirs(config_dir)
config_file = '{}_bootstrap.json'.format(node_name)
cfg = self._non_genesis_cfg
if node_args.genesis:
cfg = self._genesis_cfg
with open(os.path.join(config_dir, config_file), 'w') as f:
f.write(json.dumps(cfg, indent=4))
cmd += ['--config', config_file]
return cmd
def is_running(self, node_name):
'''
Authority on whether a node is in fact running. On discovering that a
node no longer exists, it removes the node from _nodes. We do this
here rather than in stop/kill in order to allow stop/kill to be
non-blocking. Thus, our internal model of nodes (_nodes) will always
be correct for a particular node the next time someone asks if it
'is_running'.
Args:
node_name (str):
Returns:
ret_val (bool):
'''
ret_val = False
handle = None
try:
handle = self._nodes[node_name]['Handle']
except KeyError:
pass
if handle is not None:
handle.poll()
if handle.returncode is None:
ret_val = True
if ret_val is False:
# process is authoritatively stopped; toss handle if it exists
self._nodes.pop(node_name, None)
return ret_val
def _build_env(self, node_args):
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(sys.path)
if node_args.currency_home is not None:
env['CURRENCYHOME'] = node_args.currency_home
return env
def create_genesis_block(self, node_args):
'''
Creates a key, then uses this key to author a genesis block. The node
corresponding to node_args must be initially available on the network
in order to serve this genesis block.
Args:
node_args (NodeArguments):
'''
if self.is_running(node_args.node_name) is False:
# Create key for initial validator
cmd = get_executable_script('sawtooth')
cmd += ['keygen', node_args.node_name]
# ...sawtooth keygen does not assume validator's CURRENCYHOME
key_dir = self._base_config['KeyDirectory']
if node_args.currency_home is not None:
key_dir = os.path.join(node_args.currency_home, 'keys')
cmd += ['--key-dir', key_dir]
if self._verbose is False:
cmd += ['--quiet']
proc = subprocess.Popen(cmd, env=self._build_env(node_args))
proc.wait()
# Create genesis block
cmd = get_executable_script('sawtooth')
if node_args.ledger_type is None or \
node_args.ledger_type == "poet0":
cmd += ['admin', 'poet0-genesis']
elif node_args.ledger_type == "poet1":
cmd += ['admin', 'poet1-genesis']
else:
cmd += ['admin', 'dev-mode-genesis']
if self._verbose is True:
cmd += ['-vv']
cmd += ['--node', node_args.node_name]
for x in node_args.config_files:
cmd += ['--config', x]
proc = subprocess.Popen(cmd, env=self._build_env(node_args))
proc.wait()
def do_start(self, node_args, stdout, stderr):
cmd = self._construct_start_command(node_args)
# Execute popen and store the process handle
handle = subprocess.Popen(cmd, stdout=stdout, stderr=stderr,
env=self._build_env(node_args))
handle.poll()
if handle.returncode is None:
# process is known to be running; save handle
self._nodes[node_args.node_name] = {"Handle": handle}
def _do_start(self, node_args, stdout, stderr):
self.do_start(node_args, stdout, stderr)
def start(self, node_args):
'''
Start a node if it is not already running.
Args:
node_args (NodeArguments):
'''
if self.is_running(node_args.node_name) is False:
self._do_start(node_args, sys.stdout, sys.stderr)
def stop(self, node_name):
'''
Send a non-blocking termination request to a node if it appears to be
running. OSError is caught and logged because the process may die
between is_running and our signal transmission attempt.
Args:
node_name (str):
'''
if self.is_running(node_name) is True:
try:
handle = self._nodes[node_name]['Handle']
handle.terminate()
except OSError as e:
LOGGER.debug('%s.stop failed: %s', self.__class__.__name__,
str(e))
def kill(self, node_name):
'''
Send a non-blocking kill (9) to a node if it appears to be running.
OSError is caught and logged because the process may die between
is_running and our signal transmission attempt.
Args:
node_name (str):
'''
if self.is_running(node_name) is True:
try:
handle = self._nodes[node_name]['Handle']
handle.kill()
except OSError as e:
LOGGER.debug('%s.kill failed: %s', self.__class__.__name__,
str(e))
def get_node_names(self):
names = self._nodes.keys()
return [x for x in names if self.is_running(x)]
def get_ip(self, node_name):
hostname = self._host_name
return hostname
|
from __future__ import print_function, division
import sys,os
quspin_path = os.path.join(os.getcwd(),"../")
sys.path.insert(0,quspin_path)
from quspin.operators import hamiltonian
from quspin.basis import spin_basis_general, boson_basis_general, spinless_fermion_basis_general, spinful_fermion_basis_general
import numpy as np
import scipy.sparse as sp
from quspin.operators._make_hamiltonian import _consolidate_static
#
###### define model parameters ######
J1=1.0 # spin=spin interaction
J2=0.5 # magnetic field strength
Lx, Ly = 4, 2 # linear dimension of 2d lattice
N_2d = Lx*Ly # number of sites
#
###### setting up user-defined symmetry transformations for 2d lattice ######
s = np.arange(N_2d) # sites [0,1,2,....]
x = s%Lx # x positions for sites
y = s//Lx # y positions for sites
T_x = (x+1)%Lx + Lx*y # translation along x-direction
T_y = x +Lx*((y+1)%Ly) # translation along y-direction
T_a = (x+1)%Lx + Lx*((y+1)%Ly) # translation along anti-diagonal
T_d = (x-1)%Lx + Lx*((y+1)%Ly) # translation along diagonal
R = np.rot90(s.reshape(Lx,Ly), axes=(0,1)).reshape(N_2d) # rotate anti-clockwise
P_x = x + Lx*(Ly-y-1) # reflection about x-axis
P_y = (Lx-x-1) + Lx*y # reflection about y-axis
Z = -(s+1) # spin inversion
#####
# setting up site-coupling lists
J1_list=[[J1,i,T_x[i]] for i in range(N_2d)] + [[J1,i,T_y[i]] for i in range(N_2d)]
J2_list=[[J2,i,T_d[i]] for i in range(N_2d)] + [[J2,i,T_a[i]] for i in range(N_2d)]
#
static=[ ["++",J1_list],["--",J1_list],["zz",J1_list],
["++",J2_list],["--",J2_list],["zz",J2_list]
]
static_spfs=[ ["++|",J1_list],["--|",J1_list], ["|++",J1_list],["|--",J1_list], ["z|z",J1_list],
["++|",J2_list],["--|",J2_list], ["|++",J2_list],["|--",J2_list], ["z|z",J2_list],
]
static_list = _consolidate_static(static)
static_list_spfs = _consolidate_static(static_spfs)
def compare(static_list,basis,basis_op):
for opstr,indx,J in static_list:
ME,bra,ket = basis.Op_bra_ket(opstr,indx,J,np.float64,basis_op.states)
ME_op,row,col = basis_op.Op(opstr,indx,J,np.float64)
        np.testing.assert_allclose(bra - basis_op[row],0.0,atol=1E-5,err_msg='failed bra/row in Op_bra_ket test!')
np.testing.assert_allclose(ket - basis_op[col],0.0,atol=1E-5,err_msg='failed ket/col in Op_bra_ket test!')
np.testing.assert_allclose(ME - ME_op,0.0,atol=1E-5,err_msg='failed ME in Op_bra_ket test!')
for Np in [ None, 2, N_2d-1, [N_2d//4,N_2d//8] ]:
basis=spin_basis_general(N_2d, make_basis=False,
Nup=Np,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
zblock=(Z,0)
)
basis_op=spin_basis_general(N_2d, make_basis=True,
Nup=Np,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
zblock=(Z,0)
)
compare(static_list,basis,basis_op)
print('passed spins')
basis=spinless_fermion_basis_general(N_2d, make_basis=False,
Nf=Np,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
basis_op=spinless_fermion_basis_general(N_2d, make_basis=True,
Nf=Np,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
compare(static_list,basis,basis_op)
    print('passed spinless fermions')
basis=boson_basis_general(N_2d, make_basis=False,
Nb=Np, sps=3,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
basis_op=boson_basis_general(N_2d, make_basis=True,
Nb=Np, sps=3,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
compare(static_list,basis,basis_op)
print('passed bosons')
    if Np is None:
Nf=Np
elif type(Np) is list:
Nf=list(zip(Np,Np))
else:
Nf=(Np,Np)
basis=spinful_fermion_basis_general(N_2d, make_basis=False,
Nf=Nf,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
basis_op=spinful_fermion_basis_general(N_2d, make_basis=True,
Nf=Nf,
kxblock=(T_x,0),kyblock=(T_y,0),
pxblock=(P_x,0),pyblock=(P_y,0),
)
compare(static_list_spfs,basis,basis_op)
    print('passed spinful fermions')
|
import scipy
from scipy.io import wavfile
import os
import numpy as np
import glob
from matplotlib import pyplot
GENRES = ["blues", "classical", "country", "disco", "hiphop", "jazz",
"metal", "pop", "reggae", "rock"]
def create_fft(filename):
    s_rate, data = scipy.io.wavfile.read(filename) #get sample rate and data
    fft_feat = abs(np.fft.fft(data)[:1000]) #scipy.fft is a module in modern SciPy, so use numpy's FFT for the magnitude spectrum
#fft_features = abs(scipy.fft(data)[:3000]) #doesn't help, it only increases running time
base_filename, ext = os.path.splitext(filename)
data_filename = base_filename + ".fft"
np.save(data_filename, fft_feat)
def read_fft(genre_list, base_dir):
X = [] #will store fft features
y = [] #will store labels
for l, g in enumerate(genre_list): #loop using label as numeric index, and genre as iterator over genres
genre_dir = os.path.join(base_dir, g, "*.fft.npy") #create something like: "genres/classical/*.fft.npy"
        file_list = glob.glob(genre_dir) #retrieve all the files under "genres/classical" with "fft.npy" ext
for f in file_list:
fft_features = np.load(f)
X.append(fft_features[:1000])
#X.append(fft_features[:3000]) ##doesn't help, it only increases running time
y.append(l)
return np.array(X), np.array(y)
#def plot_confusion_matrix(cm, genre_list, name, title):
# pyplot.clf()
# pyplot.matshow(cm, fignum=False, cmap='Blues', vmin=0, vmax=1.0)
# ax = pyplot.axes()
# ax.set_xticks(range(len(genre_list)))
# ax.set_xticklabels(genre_list)
# ax.xaxis.set_ticks_position("bottom")
# ax.set_yticks(range(len(genre_list)))
# ax.set_yticklabels(genre_list)
# pyplot.title(title)
# pyplot.colorbar()
# pyplot.grid(False)
# pyplot.xlabel('Predicted class')
# pyplot.ylabel('True class')
# pyplot.grid(False)
# pyplot.show()
#X, y = read_fft(GENRES, GENRE_DIR) #get the data set
#TEST GRAPHICAL CONFUSION MATRIX
#from sklearn.metrics import confusion_matrix
#from sklearn.preprocessing import normalize
#y_true = [1,1,1,2,2,2,3,3,3,4,4,4]
#y_pred = [1,2,1,3,3,2,3,3,3,4,3,2]
#cm = confusion_matrix(y_true, y_pred)
#cm = normalize(cm) #remember to normalize
#plot_confusion_matrix(cm, ["classical","blues","jazz","country"], "name", "plot")
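# A hypothetical driver (GENRE_DIR is not defined in this file, so it is an
# assumption here), following a GTZAN-style layout "genres/<genre>/<track>.wav":
#
# GENRE_DIR = "genres"
# for genre in GENRES:
#     for wav in glob.glob(os.path.join(GENRE_DIR, genre, "*.wav")):
#         create_fft(wav)
# X, y = read_fft(GENRES, GENRE_DIR)
# print(X.shape, y.shape)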
|
"""
Radio button page
clicking them in different order
"""
from pages.base_page import BasePage
import time
from selenium.webdriver.common.by import By
class RadioButtonPage(BasePage):
RADIO_BTN_1 = (By.ID, 'radio-button-1')
RADIO_BTN_2 = (By.XPATH, '/html/body/div/div[2]/input')
RADIO_BTN_3 = (By.XPATH, '/html/body/div/div[3]/input')
def click_on_btn_3(self):
btn_1 = self.driver.find_element(*self.RADIO_BTN_1)
btn_2 = self.driver.find_element(*self.RADIO_BTN_2)
btn_3 = self.driver.find_element(*self.RADIO_BTN_3)
btn_2.click()
time.sleep(1)
btn_1.click()
time.sleep(1)
btn_3.click()
def click_on_btn_2(self):
btn_1 = self.driver.find_element(*self.RADIO_BTN_1)
btn_2 = self.driver.find_element(*self.RADIO_BTN_2)
btn_3 = self.driver.find_element(*self.RADIO_BTN_3)
btn_1.click()
time.sleep(1)
btn_3.click()
time.sleep(1)
btn_2.click()
def click_on_btn_1(self):
btn_1 = self.driver.find_element(*self.RADIO_BTN_1)
btn_2 = self.driver.find_element(*self.RADIO_BTN_2)
btn_3 = self.driver.find_element(*self.RADIO_BTN_3)
btn_3.click()
time.sleep(1)
btn_2.click()
time.sleep(1)
btn_1.click()
|
from numpy.polynomial import polynomial as p
import numpy as np

def gcd1(c1, c2):
    # Euclidean algorithm on polynomial coefficient sequences (lowest degree first):
    # when c1 has shrunk to (effectively) zero, c2 is the GCD; otherwise recurse
    # on (the remainder of c2 / c1, c1).
    if np.allclose(c1, 0.0):
        return c2
    _, remainder = p.polydiv(c2, c1)
    return gcd1(p.polytrim(remainder, tol=1e-12), c1)
c1=(1,2,3);
c2=(3,2,1);
#print p.polydiv(c2,c1);
#division=p.polydiv(c2,c1);
#print p.polydiv(c1,c2);
#print division[1:];
#print p.polydiv(c1,c1);
result = gcd1(c2, c1)
print(result)
# print(remainder)
|
from . import lexer, parser, interpret
import os
print("Welcome to Mathwise. (type '.exit' to exit and '.run' to run the queued lines, '.clear' to clear the console, type '.help' for more info)")
data = ""
while True:
inp = input("mw > ")
if inp.lower().strip() == ".exit":
break
elif inp.lower().strip() == ".about":
print("Mathwise - A math interpreter in Python")
print("Version v0.1.0 developed by Angel Carias. 2021.")
elif inp.lower().strip() == ".help":
print("Mathwise REPL\n"
"\t.exit - Exit the REPL interface\n"
"\t.about - About this interpreter\n"
"\t.run - Runs the currently queued lines\n"
"\t.help - Runs this thing\n"
"\t.clear - clears the console\n"
)
elif inp.lower().strip() == ".clear":
os.system('cls' if os.name == 'nt' else 'clear')
elif inp.lower().strip() == ".run":
data = data.strip("\n")
lex = lexer.Lexer(data)
tokens, error = lex.lex()
if error:
print(error.to_string())
data = ""
continue
ast = parser.Parser(data, tokens)
ast_data = ast.parse()
if ast.error:
print(ast.error.to_string())
data = ""
continue
inter = interpret.Interpreter(data)
result = inter.goto(ast_data)
if inter.error:
print(inter.error.to_string())
else:
print(result)
data = ""
else:
# if inp == None:
# continue
data += inp + "\n"
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import logging
import numpy as np
from . import game
from . import gradient
from . import opencl
log = logging.getLogger()
DEFAULT_FORMULAS = {
"duck2": """
z = cdouble_divide(z, cdouble_cos(z));
z = cdouble_add(z, c);
z = cdouble_log(z);
""",
"m2": """
""",
}
DEFAULT_KERNELS = {
"orbit-rgb": """
__constant uint gradient[] = {{{gradient_values}}};
#define PYOPENCL_DEFINE_CDOUBLE 1
#include <pyopencl-complex.h>
#pragma OPENCL EXTENSION cl_khr_byte_addressable_store : enable
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
__kernel void compute(
__global double2 *plane,
__global uint *pixels,
char const julia,
uint const max_iter,
uint const pre_iter,
double const gradient_frequency,
double const c_real,
double const c_imag
{kernel_params}
) {{
int gid = get_global_id(0);
cdouble_t z;
cdouble_t z2;
cdouble_t c;
if (julia) {{
z = cdouble_new({pos_x}, {pos_y});
c = cdouble_new(c_real, c_imag);
}} else {{
z = cdouble_new(0, 0);
c = cdouble_new({pos_x}, {pos_y});
}}
{kernel_variables}
double escape = {escape_distance};
double modulus = 0.0f;
cdouble_t orbit;
double orbit_modulus = 0.0f;
orbit = cdouble_new(mod, mod2);
double mean = 0.0f;
double rdistance = escape;
double gdistance = escape;
int iter;
pixels[gid] = 0x00000000;
for (iter = 0; iter < max_iter; iter++) {{
{formula}
orbit_modulus = fabs(z.imag - orbit.imag);
if (orbit_modulus < trap) {{
pixels[gid] = gradient[(int)(
((orbit_modulus / trap) * {gradient_length}/10)) + 20];
break ;
}}
orbit_modulus = fabs(z.real - orbit.real);
if (orbit_modulus < trap) {{
pixels[gid] = gradient[(int)(
((orbit_modulus / trap) * {gradient_length} / 10))];
// pixels[gid] = 0xff000000 | ((int)(0xff * orbit_modulus / trap) << 8);
break ;
}}
modulus = cdouble_abs(z);
if (modulus > escape) {{
modulus = iter - log(log(modulus)) / log(2.0f) +
log(log(escape)) / log(2.0f);
modulus = modulus / (double)max_iter;
pixels[gid] = gradient[(int)(
(modulus * {gradient_length} * gradient_frequency)) %
{gradient_length}];
break;
}}
}}
}}
""",
"orbit-gradient": """
__constant uint gradient[] = {{{gradient_values}}};
#define PYOPENCL_DEFINE_CDOUBLE 1
#include <pyopencl-complex.h>
#pragma OPENCL EXTENSION cl_khr_byte_addressable_store : enable
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
__kernel void compute(
__global double2 *plane,
__global uint *pixels,
char const julia,
uint const max_iter,
uint const pre_iter,
double const gradient_frequency,
double const c_real,
double const c_imag
{kernel_params}
) {{
int gid = get_global_id(0);
cdouble_t z;
cdouble_t z2;
cdouble_t c;
if (julia) {{
z = cdouble_new({pos_x}, {pos_y});
c = cdouble_new(c_real, c_imag);
}} else {{
z = cdouble_new(0, 0);
c = cdouble_new({pos_x}, {pos_y});
}}
{kernel_variables}
double escape = {escape_distance};
double modulus = 0.0f;
cdouble_t orbit;
double orbit_modulus = 0.0f;
orbit = cdouble_new(mod, mod2);
double mean = 0.0f;
double distance = escape;
int iter;
for (iter = 0; iter < max_iter; iter++) {{
{formula}
orbit_modulus = fabs(z.imag - orbit.imag);
//mean += orbit_modulus;
if (orbit_modulus < distance) {{
distance = orbit_modulus;
}}
orbit_modulus = fabs(z.real - orbit.real);
if (orbit_modulus < distance) {{
distance = orbit_modulus;
}}
modulus = cdouble_abs(z);
if (modulus > escape) {{
break;
}}
}}
distance = sqrt(distance);
pixels[gid] = gradient[(int)(
(distance * {gradient_length} * gradient_frequency)) %
{gradient_length}]
;
}}
""",
"escape-time-gradient": """
__constant uint gradient[] = {{{gradient_values}}};
#define PYOPENCL_DEFINE_CDOUBLE 1
#include <pyopencl-complex.h>
#pragma OPENCL EXTENSION cl_khr_byte_addressable_store : enable
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
__kernel void compute(
__global double2 *plane,
__global uint *pixels,
char const julia,
uint const max_iter,
uint const pre_iter,
double const gradient_frequency,
double const c_real,
double const c_imag
{kernel_params}
) {{
int gid = get_global_id(0);
cdouble_t z;
cdouble_t z2;
cdouble_t c;
if (julia) {{
z = cdouble_new({pos_x}, {pos_y});
c = cdouble_new(c_real, c_imag);
}} else {{
z = cdouble_new(0, 0);
c = cdouble_new({pos_x}, {pos_y});
}}
{kernel_variables}
double escape = {escape_distance};
double modulus = 0.0f;
int iter;
for (iter = 0; iter < max_iter; iter++) {{
{formula}
modulus = cdouble_abs(z);
if (modulus > escape) {{
modulus = iter - log(log(modulus)) / log(2.0f) +
log(log(escape)) / log(2.0f);
modulus = modulus / (double)max_iter;
pixels[gid] = gradient[(int)(
(modulus * {gradient_length} * gradient_frequency)) %
{gradient_length}];
break;
}}
}}
}}
""",
"mean-distance": """
__constant uint gradient[] = {{{gradient_values}}};
#define PYOPENCL_DEFINE_CDOUBLE 1
#include <pyopencl-complex.h>
#pragma OPENCL EXTENSION cl_khr_byte_addressable_store : enable
#pragma OPENCL EXTENSION cl_khr_fp64 : enable
cdouble_t cdouble_iabs(cdouble_t t) {{
t.imag = fabs(t.imag);
return t;
}}
cdouble_t cdouble_rabs(cdouble_t t) {{
t.real = fabs(t.real);
return t;
}}
cdouble_t cdouble_fabs(cdouble_t t) {{
t.real = fabs(t.real);
t.imag = fabs(t.imag);
return t;
}}
__kernel void compute(
__global double2 *plane,
__global uint *pixels,
char const julia,
uint const max_iter,
uint const pre_iter,
double const gradient_frequency,
double const c_real,
double const c_imag
{kernel_params}
) {{
int gid = get_global_id(0);
cdouble_t z;
cdouble_t z2;
cdouble_t c;
double mean = 0.0f;
if (julia) {{
z = cdouble_new({pos_x}, {pos_y});
c = cdouble_new(c_real, c_imag);
}} else {{
z = cdouble_new(0, 0);
c = cdouble_new({pos_x}, {pos_y});
}}
{kernel_variables}
double escape = {escape_distance};
double modulus = 0.0f;
int iter;
for (iter = 0; iter < max_iter; iter++) {{
{formula}
if (iter > pre_iter) {{
modulus = cdouble_abs(z);
mean += modulus;
}}
if (modulus > escape && iter > pre_iter) {{
break;
}}
}}
mean = 1.0 - log2(0.5 * log2(mean / (double)(iter - pre_iter)));
pixels[gid] = gradient[(int)(
(mean * {gradient_length} * gradient_frequency)) % {gradient_length}];
}}
""",
}
class Fractal(game.Window, game.ComplexPlane):
def __init__(self, winsize, params, gpu=None):
game.Window.__init__(self, winsize)
self.params = params
self.draw = True
self.alive = True
self.mapmode = False
self.map_scene = None
if gpu:
self.gpu = gpu
self.mapmode = True
self.previous_c = collections.deque(maxlen=2000)
self.set_view(self.params["map_center_real"],
self.params["map_center_imag"],
self.params["map_radius"])
return
x, y = 'x', 'y'
if params['xyinverted']:
x, y = 'y', 'x'
cl_params = copy.copy(params)
cl_params["pos_x"] = "plane[gid]." + x
cl_params["pos_y"] = "plane[gid]." + y
if "gradient" in params:
cl_params["gradient_values"] = gradient.generate_array(
params["gradient"], params["gradient_length"])
cl_params["gradient_length"] = params["gradient_length"]
if cl_params["formula"] in DEFAULT_FORMULAS:
cl_params["formula"] = DEFAULT_FORMULAS[cl_params["formula"]]
if cl_params["kernel"] in DEFAULT_KERNELS:
kernel = DEFAULT_KERNELS[cl_params["kernel"]]
if cl_params.get("kernel_params"):
cl_params["kernel_params"] = "," + cl_params["kernel_params"]
program = kernel.format(**cl_params)
log.debug(program)
self.gpu = opencl.OpenCLCompute(program)
def render(self, frame):
if self.map_scene:
self.map_scene.add_c(
complex(self.params["c_real"], self.params["c_imag"]))
updated = False
if self.map_scene:
updated = self.map_scene.render(frame)
if not self.draw:
return updated
if self.mapmode:
view_prefix = "map_"
else:
view_prefix = ""
super_sampling = self.params["super_sampling"]
self.set_view(self.params[view_prefix + "center_real"],
self.params[view_prefix + "center_imag"],
self.params[view_prefix + "radius"])
x = np.linspace(self.plane_min[0], self.plane_max[0],
self.window_size[0] * super_sampling)
y = np.linspace(self.plane_min[1], self.plane_max[1],
self.window_size[1] * super_sampling) * 1j
plane = np.ravel(y+x[:, np.newaxis]).astype(np.complex128)
render_args = [
plane,
np.byte(self.params["julia"] and not self.mapmode),
np.uint32(self.params["max_iter"]),
np.uint32(self.params.get("pre_iter", 0)),
np.double(self.params["grad_freq"]),
np.double(self.params["c_real"]),
np.double(self.params["c_imag"]),
]
for kernel_param in self.params["kernel_params_mod"]:
render_args.append(np.double(self.params[kernel_param]))
nparray = self.gpu.render(*render_args)
if super_sampling > 1:
import scipy.ndimage
import scipy.misc
s = (self.window_size[0], self.window_size[1])
nparray = scipy.misc.imresize(
nparray.view(np.uint8).reshape(s[0]*super_sampling,
s[1]*super_sampling, 4),
s,
interp='cubic',
mode='RGBA')
self.blit(nparray.view(np.uint32))
self.draw = False
if self.mapmode:
self.draw_previous_c()
return True
def create_map_scene(self, win_size, params):
self.map_scene = Fractal(win_size, params, gpu=self.gpu)
def add_c(self, c):
if self.params["show_map"]:
if self.params["xyinverted"]:
c = complex(c.imag, c.real)
if not len(self.previous_c) or self.previous_c[-1] != c:
self.previous_c.append(c)
self.draw = True
if not self.included(c):
# Re-center
self.params["map_center_real"] = c.real
self.params["map_center_imag"] = c.imag
self.draw = True
def draw_previous_c(self):
length = len(self.previous_c)
pos = 0
for c in self.previous_c:
pos += 1
self.draw_complex(
c, color=[100 + int(100 * (pos / length))]*3, width=2)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 12 16:51:05 2016
@author: Dominic O'Kane
"""
import numpy as np
from scipy import optimize
from scipy.stats import norm
from ...finutils.FinDate import FinDate
from ...finutils.FinMath import nprime
from ...finutils.FinGlobalVariables import gDaysInYear
from ...finutils.FinError import FinError
from ...products.FinOptionTypes import FinOptionTypes
from ...products.fx.FinFXModelTypes import FinFXModel
from ...products.fx.FinFXModelTypes import FinFXModelBlackScholes
from ...products.fx.FinFXModelTypes import FinFXModelSABR
from ...models.FinModelCRRTree import crrTreeValAvg
from ...models.FinModelSABR import blackVolFromSABR
N = norm.cdf
###############################################################################
###############################################################################
def f(volatility, *args):
''' This is the objective function used in the determination of the FX
Option implied volatility which is computed in the class below. '''
self = args[0]
valueDate = args[1]
stockPrice = args[2]
discountCurve = args[3]
dividendYield = args[4]
price = args[5]
model = FinFXModelBlackScholes(volatility)
self.value(valueDate,
stockPrice,
discountCurve,
dividendYield,
model)
objFn = self._vdf - price
return objFn
###############################################################################
###############################################################################
def fvega(volatility, *args):
''' This is the derivative of the objective function with respect to the
option volatility. It is used to speed up the determination of the FX
Option implied volatility which is computed in the class below. '''
self = args[0]
valueDate = args[1]
stockPrice = args[2]
discountCurve = args[3]
dividendYield = args[4]
model = FinFXModelBlackScholes(volatility)
fprime = self.vega(valueDate,
stockPrice,
discountCurve,
dividendYield,
model)
return fprime
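# A hypothetical sketch (not part of this module) of how f and fvega could be fed
# to a Newton solve for the implied volatility; the argument order must match the
# *args unpacking above, and `option`, `spotFXRate`, `price` etc. are placeholders:
#
# impliedVol = optimize.newton(f, x0=0.10, fprime=fvega,
#                              args=(option, valueDate, spotFXRate,
#                                    discountCurve, dividendYield, price),
#                              tol=1e-7, maxiter=50)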
###############################################################################
# ALL CCY RATES MUST BE IN NUM UNITS OF DOMESTIC PER UNIT OF FOREIGN CURRENCY
# SO EURUSD = 1.30 MEANS 1.30 DOLLARS PER EURO SO DOLLAR IS THE DOMESTIC AND
# EUR IS THE FOREIGN CURRENCY
###############################################################################
class FinFXVanillaOption():
# ''' This is a class for an FX Option trade. It permits the user to
# calculate the price of an FX Option trade which can be expressed in a
    # number of ways depending on the investor or hedger's currency. It also
    # allows the calculation of the option's delta in a number of forms as
    # well as the various Greek risk sensitivities. '''
def __init__(self,
expiryDate,
strikeFXRate, # ONE UNIT OF FOREIGN IN DOMESTIC CCY
currencyPair, # FORDOM
optionType,
notional,
notionalCurrency,
spotDays = 0):
''' Create the FX Vanilla Option object. Inputs include expiry date,
strike, currency pair, option type (call or put), notional and the
        currency of the notional. An adjustment for spot days is enabled. All
currency rates must be entered in the price in domestic currency of
one unit of foreign. And the currency pair should be in the form FORDOM
where FOR is the foreign currency pair currency code and DOM is the
same for the domestic currency. '''
deliveryDate = expiryDate.addWorkDays(spotDays)
''' The FX rate is in the price in domestic currency ccy2 of a single unit
of the foreign currency which is ccy1. For example EURUSD of 1.3 is the
price in USD (CCY2) of 1 unit of EUR (CCY1)'''
if deliveryDate < expiryDate:
raise FinError("Delivery date must be on or after expiry date.")
if len(currencyPair) != 6:
raise FinError("Currency pair must be 6 characters.")
self._expiryDate = expiryDate
self._deliveryDate = deliveryDate
self._strikeFXRate = strikeFXRate
self._currencyPair = currencyPair
self._forName = self._currencyPair[0:3]
self._domName = self._currencyPair[3:6]
if notionalCurrency != self._domName and notionalCurrency != self._forName:
raise FinError("Notional currency not in currency pair.")
self._notionalCurrency = notionalCurrency
self._notional = notional
if optionType != FinOptionTypes.EUROPEAN_CALL and \
optionType != FinOptionTypes.EUROPEAN_PUT and\
optionType != FinOptionTypes.AMERICAN_CALL and \
optionType != FinOptionTypes.AMERICAN_PUT:
raise FinError("Unknown Option Type:" + optionType)
self._optionType = optionType
self._spotDays = spotDays
###############################################################################
###############################################################################
def value(self,
valueDate,
spotFXRate, # ONE UNIT OF FOREIGN IN DOMESTIC CCY
domDiscountCurve,
forDiscountCurve,
model):
''' This function calculates the value of the option using a specified
model with the resulting value being in domestic i.e. ccy2 terms.
Recall that Domestic = CCY2 and Foreign = CCY1 and FX rate is in
price in domestic of one unit of foreign currency. '''
if type(valueDate) == FinDate:
spotDate = valueDate.addWorkDays(self._spotDays)
tdel = (self._deliveryDate - spotDate) / gDaysInYear
texp = (self._expiryDate - valueDate) / gDaysInYear
else:
tdel = valueDate
texp = tdel
if np.any(spotFXRate <= 0.0):
raise FinError("spotFXRate must be greater than zero.")
if model._parentType != FinFXModel:
raise FinError("Model is not inherited off type FinFXModel.")
if np.any(tdel < 0.0):
raise FinError("Time to expiry must be positive.")
tdel = np.maximum(tdel, 1e-10)
domDF = domDiscountCurve.df(tdel)
rd = -np.log(domDF)/tdel
forDF = forDiscountCurve.df(tdel)
rf = -np.log(forDF)/tdel
S0 = spotFXRate
K = self._strikeFXRate
F0T = S0 * np.exp((rd-rf)*tdel)
if type(model) == FinFXModelBlackScholes \
or type(model) == FinFXModelSABR:
if type(model) == FinFXModelBlackScholes:
volatility = model._volatility
elif type(model) == FinFXModelSABR:
volatility = blackVolFromSABR(model.alpha,
model.beta,
model.rho,
model.nu,
F0T, K, tdel)
            if np.any(volatility < 0.0):
raise FinError("Volatility should not be negative.")
volatility = np.maximum(volatility, 1e-10)
lnS0k = np.log(S0/K)
sqrtT = np.sqrt(texp)
den = volatility * sqrtT
mu = rd - rf
v2 = volatility * volatility
d1 = (lnS0k + mu * tdel + v2 * texp / 2.0) / den
d2 = (lnS0k + mu * tdel - v2 * texp / 2.0) / den
numStepsPerYear = 100
if self._optionType == FinOptionTypes.EUROPEAN_CALL:
vdf = np.exp(-rd*tdel) * (F0T*N(d1) - K*N(d2))
elif self._optionType == FinOptionTypes.EUROPEAN_PUT:
vdf = -np.exp(-rd*tdel) * (F0T*N(-d1) - K*N(-d2))
elif self._optionType == FinOptionTypes.AMERICAN_CALL:
vdf = crrTreeValAvg(S0, rd, rf, volatility, numStepsPerYear,
texp, FinOptionTypes.AMERICAN_CALL.value, K)['value']
elif self._optionType == FinOptionTypes.AMERICAN_PUT:
vdf = crrTreeValAvg(S0, rd, rf, volatility, numStepsPerYear,
texp, FinOptionTypes.AMERICAN_PUT.value, K)['value']
else:
raise FinError("Unknown option type")
# The option value v is in domestic currency terms but the value of the
# option may be quoted in either currency terms and so we calculate these
if self._notionalCurrency == self._domName:
self._notional_dom = self._notional
self._notional_for = self._notional / self._strikeFXRate
elif self._notionalCurrency == self._forName:
self._notional_dom = self._notional * self._strikeFXRate
self._notional_for = self._notional
else:
raise FinError("Invalid notional currency.")
self._vdf = vdf
self._pips_dom = vdf
self._pips_for = vdf / (spotFXRate * self._strikeFXRate)
self._cash_dom = vdf * self._notional_dom / self._strikeFXRate
self._cash_for = vdf * self._notional_for / spotFXRate
self._pct_dom = vdf / self._strikeFXRate
self._pct_for = vdf / spotFXRate
return { 'v': vdf,
"cash_dom": self._cash_dom,
"cash_for": self._cash_for,
"pips_dom": self._pips_dom,
"pips_for": self._pips_for,
"pct_dom": self._pct_dom,
"pct_for": self._pct_for,
"not_dom": self._notional_dom,
"not_for": self._notional_for,
"ccy_dom": self._domName,
"ccy_for": self._forName}
###############################################################################
###############################################################################
def delta_bump(self,
valueDate,
spotFXRate,
ccy1DiscountCurve,
ccy2DiscountCurve,
model):
''' Calculation of the FX option delta by bumping the spot FX rate by
one basis point (0.01%) of its value. This gives the FX spot delta. For speed
we prefer to use the analytical calculation of the derivative given below. '''
bump = 0.0001 * spotFXRate
v = self.value(
valueDate,
spotFXRate,
ccy1DiscountCurve,
ccy2DiscountCurve,
model)
vBumped = self.value(
valueDate,
spotFXRate + bump,
ccy1DiscountCurve,
ccy2DiscountCurve,
model)
if type(vBumped) is dict:
delta = (vBumped['v'] - v['v']) / bump
else:
delta = (vBumped - v) / bump
return delta
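# A central-difference variant (sketch only; assumes value() returns the dict with
# key 'v' as above):
#   vUp   = self.value(valueDate, spotFXRate + bump, ccy1DiscountCurve, ccy2DiscountCurve, model)['v']
#   vDown = self.value(valueDate, spotFXRate - bump, ccy1DiscountCurve, ccy2DiscountCurve, model)['v']
#   delta = (vUp - vDown) / (2.0 * bump)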
###############################################################################
###############################################################################
def delta(self,
valueDate,
spotFXRate,
domDiscountCurve,
forDiscountCurve,
model):
''' Calculation of the FX Option delta. There are several definitions
of delta and so we are required to return a dictionary of values. The
definitions can be found on Page 44 of Foreign Exchange Option Pricing
by Iain Clark, published by Wiley Finance. '''
if type(valueDate) == FinDate:
spotDate = valueDate.addWorkDays(self._spotDays)
tdel = (self._deliveryDate - spotDate) / gDaysInYear
texp = (self._expiryDate - valueDate) / gDaysInYear
else:
tdel = valueDate
texp = tdel
if np.any(spotFXRate <= 0.0):
raise FinError("Spot FX Rate must be greater than zero.")
if model._parentType != FinFXModel:
raise FinError("Model is not inherited off type FinFXModel.")
if np.any(tdel < 0.0):
raise FinError("Time to expiry must be positive.")
tdel = np.maximum(tdel, 1e-10)
domDf = domDiscountCurve.df(tdel)
rd = -np.log(domDf)/tdel
forDf = forDiscountCurve.df(tdel)
rf = -np.log(forDf)/tdel
K = self._strikeFXRate
S0 = spotFXRate
F = S0 * np.exp((rd-rf)*tdel)
if type(model) == FinFXModelBlackScholes:
volatility = model._volatility
if np.any(volatility < 0.0):
raise FinError("Volatility should not be negative.")
volatility = np.maximum(volatility, 1e-10)
lnS0k = np.log(spotFXRate / self._strikeFXRate)
sqrtT = np.sqrt(texp)
den = volatility * sqrtT
mu = rd - rf
v2 = volatility * volatility
d1 = (lnS0k + mu * tdel + v2 * texp / 2.0) / den
d2 = (lnS0k + mu * tdel - v2 * texp / 2.0) / den
if self._optionType == FinOptionTypes.EUROPEAN_CALL:
w = 1
elif self._optionType == FinOptionTypes.EUROPEAN_PUT:
w = -1
else:
raise FinError("Unknown option type")
spot_delta = w*np.exp(-rf * tdel)*N(w*d1)
self._pips_spot_delta = spot_delta
self._pips_fwd_delta = w*N(w*d1)
self._pips_fut_delta = w*np.exp(-rd*tdel)*N(w*d1)
self._pct_spot_delta_prem_adj = w*np.exp(-rd*tdel)*N(w*d2)*K/S0
self._pct_fwd_delta_prem_adj = w*K*N(w*d2)/F
self._simple = w*N(w*(d1+d2)/2.0)
return {"pips_spot_delta": self._pips_spot_delta,
"pips_fwd_delta": self._pips_fwd_delta,
"pips_fut_delta": self._pips_fut_delta,
"pct_spot_delta_prem_adj": self._pct_spot_delta_prem_adj,
"pct_fwd_delta_prem_adj": self._pct_fwd_delta_prem_adj,
"simple": self._simple }
###############################################################################
###############################################################################
def gamma(self,
valueDate,
spotFXRate, # value of a unit of foreign in domestic currency
domDiscountCurve,
forDiscountCurve,
model):
''' This function calculates the FX option gamma, the sensitivity of the spot delta to the spot FX rate. '''
if type(valueDate) == FinDate:
t = (self._expiryDate - valueDate) / gDaysInYear
else:
t = valueDate
if np.any(spotFXRate <= 0.0):
raise FinError("FX Rate must be greater than zero.")
if model._parentType != FinFXModel:
raise FinError("Model is not inherited off type FinFXModel.")
if np.any(t < 0.0):
raise FinError("Time to expiry must be positive.")
t = np.maximum(t, 1e-10)
domDf = domDiscountCurve.df(t)
rd = -np.log(domDf)/t
forDf = forDiscountCurve.df(t)
rf = -np.log(forDf)/t
K = self._strikeFXRate
S0 = spotFXRate
if type(model) == FinFXModelBlackScholes:
volatility = model._volatility
if np.any(volatility < 0.0):
raise FinError("Volatility should not be negative.")
volatility = np.maximum(volatility, 1e-10)
lnS0k = np.log(S0 / K)
sqrtT = np.sqrt(t)
den = volatility * sqrtT
mu = rd - rf
v2 = volatility * volatility
d1 = (lnS0k + (mu + v2 / 2.0) * t) / den
gamma = np.exp(-rf * t) * nprime(d1)
gamma = gamma / S0 / den
else:
raise FinError("Unknown Model Type")
return gamma
###############################################################################
###############################################################################
def vega(self,
valueDate,
spotFXRate, # value of a unit of foreign in domestic currency
domDiscountCurve,
forDiscountCurve,
model):
''' This function calculates the FX option vega, the sensitivity of the option value to the volatility. '''
if type(valueDate) == FinDate:
t = (self._expiryDate - valueDate) / gDaysInYear
else:
t = valueDate
if np.any(spotFXRate <= 0.0):
raise FinError("Spot FX Rate must be greater than zero.")
if model._parentType != FinFXModel:
raise FinError("Model is not inherited off type FinEquityModel.")
if np.any(t < 0.0):
raise FinError("Time to expiry must be positive.")
t = np.maximum(t, 1e-10)
domDf = domDiscountCurve.df(t)
rd = -np.log(domDf)/t
forDf = forDiscountCurve.df(t)
rf = -np.log(forDf)/t
K = self._strikeFXRate
S0 = spotFXRate
if type(model) == FinFXModelBlackScholes:
volatility = model._volatility
if np.any(volatility < 0.0):
raise FinError("Volatility should not be negative.")
volatility = np.maximum(volatility, 1e-10)
lnS0k = np.log(S0/K)
sqrtT = np.sqrt(t)
den = volatility * sqrtT
mu = rd - rf
v2 = volatility * volatility
d1 = (lnS0k + (mu + v2 / 2.0) * t) / den
vega = S0 * sqrtT * np.exp(-rf * t) * nprime(d1)
else:
raise FinError("Unknown Model type")
return vega
###############################################################################
###############################################################################
def theta(self,
valueDate,
spotFXRate, # value of a unit of foreign in domestic currency
domDiscountCurve,
forDiscountCurve,
model):
''' This function calculates the time decay of the FX option. '''
if type(valueDate) == FinDate:
t = (self._expiryDate - valueDate) / gDaysInYear
else:
t = valueDate
if np.any(spotFXRate <= 0.0):
raise FinError("Spot FX Rate must be greater than zero.")
if model._parentType != FinFXModel:
raise FinError("Model is not inherited off type FinEquityModel.")
if np.any(t < 0.0):
raise FinError("Time to expiry must be positive.")
t = np.maximum(t, 1e-10)
domDf = domDiscountCurve.df(t)
rd = -np.log(domDf)/t
forDf = forDiscountCurve.df(t)
rf = -np.log(forDf)/t
K = self._strikeFXRate
S0 = spotFXRate
if type(model) == FinFXModelBlackScholes:
vol = model._volatility
if np.any(vol < 0.0):
raise FinError("Volatility should not be negative.")
vol = np.maximum(vol, 1e-10)
lnS0k = np.log(S0/K)
sqrtT = np.sqrt(t)
den = vol * sqrtT
mu = rd - rf
v2 = vol * vol
d1 = (lnS0k + (mu + v2 / 2.0) * t) / den
d2 = (lnS0k + (mu - v2 / 2.0) * t) / den
if self._optionType == FinOptionTypes.EUROPEAN_CALL:
v = - S0 * np.exp(-rf * t) * nprime(d1) * vol / 2.0 / sqrtT
v = v + rf * S0 * np.exp(-rf * t) * N(d1)
v = v - rd * K * np.exp(-rd * t) * N(d2)
elif self._optionType == FinOptionTypes.EUROPEAN_PUT:
v = - S0 * np.exp(-rf * t) * nprime(d1) * vol / 2.0 / sqrtT
v = v + rd * K * np.exp(-rd * t) * N(-d2)
v = v - rf * S0 * np.exp(-rf * t) * N(-d1)
else:
raise FinError("Unknown option type")
else:
raise FinError("Unknown Model Type")
return v
###############################################################################
###############################################################################
def impliedVolatility(self,
valueDate,
stockPrice,
discountCurve,
dividendYield,
price):
''' This function determines the implied volatility of an FX option
given a price and the other option details. It uses a one-dimensional
Newton root search algorithm to determine the implied volatility. '''
argtuple = (self, valueDate, stockPrice,
discountCurve, dividendYield, price)
sigma = optimize.newton(f, x0=0.2, fprime=fvega, args=argtuple,
tol=1e-5, maxiter=50, fprime2=None)
return sigma
###############################################################################
###############################################################################
def valueMC(self,
valueDate,
spotFXRate,
domDiscountCurve,
forDiscountCurve,
model,
numPaths=10000,
seed=4242):
''' Calculate the value of an FX Option using Monte Carlo methods.
This function can be used to validate the risk measures calculated
above or used as starting code for modelling an exotic FX product that
cannot be priced analytically. This function uses Numpy vectorisation
for speed of execution.'''
if model._parentType == FinFXModel:
volatility = model._volatility
else:
raise FinError("Model Type invalid")
np.random.seed(seed)
t = (self._expiryDate - valueDate) / gDaysInYear
domDF = domDiscountCurve.df(self._expiryDate)
forDF = forDiscountCurve.df(self._expiryDate)
rd = -np.log(domDF)/t
rf = -np.log(forDF)/t
mu = rd - rf
v2 = volatility**2
K = self._strikeFXRate
sqrtdt = np.sqrt(t)
# Use antithetic variates for variance reduction
g = np.random.normal(0.0, 1.0, size=(1, numPaths))
s = spotFXRate * np.exp((mu - v2 / 2.0) * t)
m = np.exp(g * sqrtdt * volatility)
s_1 = s * m
s_2 = s / m
if self._optionType == FinOptionTypes.EUROPEAN_CALL:
payoff_a_1 = np.maximum(s_1 - K, 0)
payoff_a_2 = np.maximum(s_2 - K, 0)
elif self._optionType == FinOptionTypes.EUROPEAN_PUT:
payoff_a_1 = np.maximum(K - s_1, 0)
payoff_a_2 = np.maximum(K - s_2, 0)
else:
raise FinError("Unknown option type.")
payoff = np.mean(payoff_a_1) + np.mean(payoff_a_2)
v = payoff * np.exp(-rd * t) / 2.0
return v
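# Possible extension (sketch, not part of the original code): a Monte Carlo standard
# error could be reported alongside the mean, e.g.
#   disc_payoffs = 0.5 * (payoff_a_1 + payoff_a_2) * np.exp(-rd * t)
#   std_err = np.std(disc_payoffs) / np.sqrt(numPaths)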
###############################################################################
###############################################################################
|
from unittest import TestCase
import math
import dexpy.design as design
import numpy as np
import pandas as pd
class TestModelMatrix(TestCase):
"""Tests for generating a model matrix"""
@classmethod
def test_quadratic_model(cls):
"""Test expanding a quadratic model in a rotatable ccd"""
axial_pt = math.sqrt(2)
factor_data = [
[-1, -1],
[ 1, -1],
[-1, 1],
[ 1, 1],
[-axial_pt, 0],
[axial_pt, 0],
[ 0, -axial_pt],
[ 0, axial_pt],
[ 0, 0]
]
factor_data = pd.DataFrame(factor_data, columns=design.get_factor_names(len(factor_data[0])))
X = design.create_model_matrix(factor_data, "1 + X1 + X2 + X1:X2 + I(X1**2) + I(X2**2)")
np.testing.assert_almost_equal([1.0, axial_pt, 0.0, -0.0, pow(axial_pt, 2), 0.0], X[5])
|
import os
import datetime
import logging
import random
# os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
import numpy as np
import tensorflow as tf
import PIL
from train_data import TrainData
from fractal_gen import FractalGenTensorflowModel
from models_resnet import build_model
logging.basicConfig(
format='[%(asctime)s][%(levelname)-5.5s] %(message)s',
level=logging.INFO,
handlers=[
logging.FileHandler('.data/log.txt'),
logging.StreamHandler()
])
# constants, directory paths
tensorboard_dir = '.data\\tensorboard' # this uses backslash because the internal keras tensorboard callback improperly handles forward slash paths
train_checkpoints_dir = '.data/model_checkpoints'
sample_inputs_dir = '.data/model_sample_inputs'
sample_outputs_dir = '.data/model_samples'
# private vars, used to contain state that can be referenced by keras events
# initialized in __main__
_current_epoch = 1
_model = None
class TrainProcessor():
def __init__(self):
self._current_epoch = 1
self._model = None
def get_last_checkpoint():
ld = [int(x) for x in os.listdir(train_checkpoints_dir)]
ld.sort()
if len(ld) > 0:
return int(ld[-1])
self._model = build_model()
last_checkpoint = get_last_checkpoint()
if last_checkpoint:
self._current_epoch = last_checkpoint + 1
self._model.load_weights(f'{train_checkpoints_dir}/{last_checkpoint}/model_weights')
def train(self, epochs, batch_size=8):
all_train_callbacks = [
# Disable this for now since it's unnecessary
# tf.keras.callbacks.TensorBoard(log_dir=tensorboard_dir),
tf.keras.callbacks.LambdaCallback(
on_epoch_begin=lambda epoch_num, logs: self._on_train_epoch_begin(),
on_epoch_end=lambda epoch_num, logs: self._on_train_epoch_end(logs['loss'])
)
]
batch_gen = self._train_generator(batch_size=batch_size)
batch_iter = 0
for features, labels in batch_gen:
self._model.fit(
x=features,
y=labels,
epochs=1,
shuffle=False,
callbacks=all_train_callbacks)
batch_iter += 1
if batch_iter >= epochs: break
def preprocess_pil_image(img):
""" Preprocess PIL image into the input data type for our keras model """
data = np.asarray(img, dtype=np.uint8)
data = np.reshape((data.astype(dtype=np.float32) / 255.0), [1, 1080, 1920, 3])
img.close()
return data
def postprocess_pil_image(npdata):
""" Postprocess output data from our keras model into a PIL image """
npdata = np.asarray(np.clip(npdata[0] * 255, 0, 255), dtype=np.uint8)
return PIL.Image.fromarray(npdata, 'RGB')
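# Round-trip sketch (assumes a 1920x1080 RGB input, matching the reshape above):
#   img = PIL.Image.open('example.png')            # hypothetical file
#   x = TrainProcessor.preprocess_pil_image(img)   # float32 array [1, 1080, 1920, 3] in [0, 1]
#   y = TrainProcessor.postprocess_pil_image(x)    # back to a PIL RGB image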
def _on_train_epoch_begin(self):
logging.info(f'epoch begin {self._current_epoch}')
def _on_train_epoch_end(self, loss):
epoch = self._current_epoch
logging.info(f'epoch end {epoch}, loss={loss}')
if np.isnan(loss):
exit()
if (epoch % 500) == 0:
self._save_model_checkpoint()
self._process_sample_images()
self._current_epoch += 1
def _train_generator(self, batch_size=8):
features = []
labels = []
while True:
td = TrainData.get_random(logging=False)
feature = TrainProcessor.preprocess_pil_image(td.get_train_image())
label = TrainProcessor.preprocess_pil_image(td.get_next_train_image())
features.append(feature[0])
labels.append(label[0])
if len(features) >= batch_size:
yield (np.array(features), np.array(labels))
features = []
labels = []
def _save_model_checkpoint(self):
model = self._model
epoch = self._current_epoch
logging.info(f'begin save model weights after epoch {epoch}')
out_dir = f'{train_checkpoints_dir}/{epoch}'
if not os.path.exists(out_dir):
os.mkdir(out_dir)
model.save_weights(f'{out_dir}/model_weights')
logging.info(f'model weights after epoch {epoch} save end')
def _process_sample_images(self):
""" Processes images in the '.data/model_sample_inputs' directory through the model, each with 5 samples """
model = self._model
epoch = self._current_epoch
for img in os.listdir(sample_inputs_dir):
out_dir = f'{sample_outputs_dir}/{img}'
if not os.path.exists(out_dir):
os.mkdir(out_dir)
logging.info(f'process sample {img}')
try:
x = PIL.Image.open(f'{sample_inputs_dir}/{img}')
x.load()
x.save(f'{out_dir}/{epoch}-0.png')
x = TrainProcessor.preprocess_pil_image(x)
max_iters = 10
for i in range(1, max_iters + 1):
x = model.predict(x)
y = TrainProcessor.postprocess_pil_image(x)
y.save(f'{out_dir}/{epoch}-{i}.png')
y.close()
logging.info(f'process sample {img} completed {i}/{max_iters}')
except Exception as e:
logging.error(f'exception processing sample {img}', exc_info=True)
pass
if __name__ == '__main__':
if not os.path.exists('.data'):
os.mkdir('.data')
if not os.path.exists(tensorboard_dir):
os.mkdir(tensorboard_dir)
if not os.path.exists(train_checkpoints_dir):
os.mkdir(train_checkpoints_dir)
if not os.path.exists(sample_inputs_dir):
os.mkdir(sample_inputs_dir)
if not os.path.exists(sample_outputs_dir):
os.mkdir(sample_outputs_dir)
proc = TrainProcessor()
proc.train(100000, batch_size=1)
# model = proc._model
# x = PIL.Image.open(f'.data/model_sample_inputs/tree.png')
# x.load()
# x = TrainProcessor.preprocess_pil_image(x)
# for i in range(0, 2000):
# print(f'iter {i}')
# x = model.predict(x)
# y = TrainProcessor.postprocess_pil_image(x)
# y.save(f'.data/gen/{i}.png')
# y.close() |
""" DAWNets handling """
from builtins import object
from future.utils import with_metaclass
import abc
import os
import sys
import copy
import pprint
import logging
from textx.metamodel import metamodel_from_file
from textx.exceptions import TextXError
from . import dawnyaml
from .. import utils
# TextX schema for guards
GUARD_MM = metamodel_from_file(os.path.abspath(os.path.join(os.path.dirname(__file__), 'dawnet_guards_grammar.tx')))
def parse_guard(guard_expr):
if guard_expr:
try:
# TextX requires unicode in Python < 3
# (see <https://github.com/textX/textX/blob/00c0ec3a27c6aae051e63530be90eba0b3e8a90a/textx/metamodel.py#L22>)
return GUARD_MM.model_from_str(str(guard_expr) if sys.version_info[0] >= 3 else unicode(guard_expr))
except TextXError as e:
logging.error("Error in guard [{}]: {}".format(guard_expr, e))
return None
else:
return None
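# Usage sketch (the concrete guard syntax is defined by dawnet_guards_grammar.tx, so the
# expression below is only illustrative):
#   model = parse_guard("x == 'a'")   # hypothetical guard expression
#   tree = guard_parse_tree(model) if model is not None else None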
def guard_parse_tree(g_model):
tname = type(g_model).__name__
if tname == 'Guard':
return {'type': tname, 'expr': guard_parse_tree(g_model.expr)}
elif tname == 'And_expr':
return {'type': tname,
'terms': [guard_parse_tree(term) for term in g_model.terms]}
elif tname == 'Or_expr':
return {'type': tname,
'terms': [guard_parse_tree(term) for term in g_model.terms]}
elif tname == 'Parens_expr':
return {'type': tname, 'expr': guard_parse_tree(g_model.expr)}
elif tname == 'Comparison':
return {'type': tname,
'op': g_model.op,
'lhs': guard_parse_tree(g_model.lhs),
'rhs': guard_parse_tree(g_model.rhs)}
elif tname == 'Truth':
return {'type': tname, 'value': g_model.value}
elif tname == 'Var':
return {'type': tname, 'value': g_model.id}
elif tname == 'Const':
return {'type': tname, 'value': str(g_model.value)}
else:
logging.error('unknown guard "{}" of category {}'.format(str(g_model), tname))
return {'type': tname, 'value': str(g_model)}
def check_guard(guard_expr):
def sexp(op, args):
return {'op': op, 'args': args}
def simplify_guard(g_expr):
tname = type(g_expr).__name__
if tname == 'Guard':
return simplify_guard(g_expr.expr)
elif tname == 'And_expr':
if len(g_expr.terms) > 1:
return ANDexpr([simplify_guard(ge) for ge in g_expr.terms])
else:
return simplify_guard(g_expr.terms[0])
elif tname == 'Or_expr':
if len(g_expr.terms) > 1:
return ORexpr([simplify_guard(ge) for ge in g_expr.terms])
else:
return simplify_guard(g_expr.terms[0])
elif tname == 'Parens_expr':
return simplify_guard(g_expr.expr)
elif tname == 'Comparison':
return Comparison(g_expr.op, simplify_guard(g_expr.lhs), simplify_guard(g_expr.rhs))
elif tname == 'Truth':
return Truthvalue(g_expr.value)
elif tname == 'Var':
return g_expr.id
elif tname == 'Const':
return "'{}'".format(str(g_expr.value))
else:
logging.error('unknown guard "{}" of category {}'.format(str(g_expr), tname))
guard_model = parse_guard(guard_expr)
return simplify_guard(guard_model) if guard_model else None
def ORexpr(args):
terms = []
for arg in args:
if arg is not None:
if arg.isOR():
terms += arg.getArguments()
elif isinstance(arg, Truthvalue):
if arg.isTrue():
return Truthvalue('true')
else:
terms.append(arg)
if len(terms) < 1:
return Truthvalue('true')
elif len(terms) == 1:
return terms[0]
else:
return Or(terms)
def ANDexpr(args):
terms = []
for arg in args:
if arg is not None:
if arg.isAND():
terms += arg.getArguments()
elif isinstance(arg, Truthvalue):
if arg.isFalse():
return Truthvalue('false')
else:
terms.append(arg)
if len(terms) < 1:
return Truthvalue('false')
elif len(terms) == 1:
return terms[0]
else:
return And(terms)
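# For example: a 'false' term short-circuits ANDexpr to Truthvalue('false') and 'true'
# terms are dropped from the conjunction; ORexpr behaves dually ('true' short-circuits,
# 'false' terms are dropped). Nested AND/OR arguments are flattened into a single level.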
class Guard(with_metaclass(abc.ABCMeta, object)):
_op = None
_args = []
def isOR(self):
return self._op == 'OR'
def isAND(self):
return self._op == 'AND'
def isComparison(self):
return isinstance(self, Comparison)
def isTruthValue(self):
return isinstance(self, Truthvalue)
def isAtom(self):
return not (self.isAND() or self.isOR())
def getArguments(self):
return self._args
def getOperator(self):
return self._op
def prefixExpr(self):
if isinstance(self, Comparison):
return '{}({}, {})'.format(self.getOperator(), self.getArguments()[0], self.getArguments()[1])
elif isinstance(self, Truthvalue):
return self.getOperator()
else:
return '{}({})'.format('And' if self.isAND() else 'Or', ', '.join([arg.prefixExpr() for arg in self.getArguments()]))
@staticmethod
def isConst(term):
strTerm = str(term)
return strTerm[0] == "'" and strTerm[-1] == "'"
@staticmethod
def constName(term):
return str(term).strip("'").replace(' ', '_')
@abc.abstractmethod
def getConstants(self):
"""Returns the set of contants referred within the guard."""
pass
class And(Guard):
def __init__(self, args):
self._op = 'AND'
self._args = args
def __repr__(self):
return '(' + ' & '.join([pprint.pformat(e) for e in self.getArguments()]) + ')'
def getConstants(self):
return set().union(*[s.getConstants() for s in self.getArguments()])
class Or(Guard):
def __init__(self, args):
self._op = 'OR'
self._args = args
def __repr__(self):
return '(' + ' | '.join([pprint.pformat(e) for e in self.getArguments()]) + ')'
def getConstants(self):
return set().union(*[s.getConstants() for s in self.getArguments()])
class Truthvalue(Guard):
def __init__(self, value):
self._op = 'true' if value.lower() == 'true' else 'false'
def __repr__(self):
return self._op
def isTrue(self):
return self._op == 'true'
def isFalse(self):
return not self.isTrue()
def getConstants(self):
return set()
class Comparison(Guard):
def __init__(self, op, lhs, rhs):
self._op = op
self._args = [lhs, rhs]
def __repr__(self):
return '{}{}{}'.format(self.getArguments()[0], self.getOperator(), self.getArguments()[1])
def lhs(self):
return self._args[0]
def rhs(self):
return self._args[1]
def getConstants(self):
# WARNING: it might return variable names
return set([Guard.constName(x) for x in self.getArguments()])
def readDAWNET(stream):
dawnet_obj = dawnyaml.readDAWNETobj(stream)
return DAWNet(dawnet_obj) if dawnet_obj is not None else None
def readTrace(stream):
return dawnyaml.readTraceObj(stream)
def getGuardExpr(tobj, lang='default'):
guard_obj = tobj.get('guard', {})
if isinstance(guard_obj, dict):
return guard_obj.get(lang, None)
else:
return guard_obj if lang == 'default' else None
class DAWNet(object):
@staticmethod
def reduced_domain_flag():
return utils.get_main_conf('reducedomain', type=bool)
@staticmethod
def nodata_flag():
return utils.get_main_conf('nodata', type=bool)
def __init__(self, dawnetOBJ):
self._json_obj = dawnetOBJ if isinstance(dawnetOBJ, dict) else dict()
self._name = dawnetOBJ.get('name', 'DAWNet')
self._transitions = self._json_obj.get('transitions', {})
self._place_adj = {}
self._place_insets = {}
self._vars = {}
self._guards = {}
self._ad = set()
self._guardConsts = set()
for (name, t) in self._transitions.items():
self._updateTransitionInstance(name, t)
self.updateStartEndPlaces()
def _updateTransitionInstance(self, name, tobj):
for p in tobj['outflows']:
if p not in self._place_adj:
self._place_adj[p] = set()
if p not in self._place_insets:
self._place_insets[p] = set()
self._place_insets[p].add(name)
for p in tobj['inflows']:
if p not in self._place_adj:
self._place_adj[p] = set()
if p not in self._place_insets:
self._place_insets[p] = set()
self._place_adj[p].add(name)
for (v, obs) in tobj.get('updates', {}).items():
# make sure update values are strings
v_values = set([str(o) for o in obs])
self._ad.update(v_values)
if v in self._vars:
self._vars[v] = self._vars[v] | v_values
else:
self._vars[v] = v_values
guard = check_guard(getGuardExpr(tobj))
if guard:
self._guards[name] = guard
self._guardConsts.update(guard.getConstants())
# Guard getConstants might return variable names
self._guardConsts.difference_update(self.variableNames())
def updateStartEndPlaces(self):
startPlaces = [name for (name, trSet) in self._place_insets.items() if len(trSet) < 1]
if len(startPlaces) > 1:
logging.error('Too many start places {}.'.format(', '.join(startPlaces)))
if len(startPlaces) < 1:
logging.error('Missing start place.')
self._start = startPlaces[0] if len(startPlaces) > 0 else None
endPlaces = [name for (name, trSet) in self._place_adj.items() if len(trSet) < 1]
if len(endPlaces) > 1:
logging.error('Too many end places {}.'.format(', '.join(endPlaces)))
if len(endPlaces) < 1:
logging.error('Missing end place.')
self._end = endPlaces[0] if len(endPlaces) > 0 else None
def getSummary(self):
dawnetDesc = {
'Transitions': self.transitionNames(),
'Places': self.placeNames(), 'start place': self.startPlace(), 'end place': self.endPlace(),
'Vars': {v: list(self.varDomain(v)) for v in self.variableNames()},
'Guards': {key: str(guard) for (key, guard) in self.guards().items()},
'Guard Consts': list(self.getGuardConst()),
'Active domain': list(self.getAD()),
'Full domain': list(self._ad)
}
return dawnetDesc
def show(self):
dawnyaml.write_yaml(self.getSummary())
def toYAML(self, stream):
dawnetOBJ = {'name': self.name(), 'transitions': self.transitions()}
dawnyaml.write_yaml(dawnetOBJ, stream)
def transitions(self):
return self._transitions
def guards(self):
return {} if self.nodata_flag() else self._guards
def name(self):
return self._name
def startPlace(self):
return self._start
def endPlace(self):
return self._end
def transitionNames(self):
return self._transitions.keys()
def placeNames(self):
return self._place_adj.keys()
def variableNames(self):
return [] if self.nodata_flag() else self._vars.keys()
def varDomain(self, name):
return self._vars[name].intersection(self.getAD())
def getAD(self):
if self.nodata_flag():
return set()
elif self.reduced_domain_flag():
return self.getGuardConst()
else:
return self._ad
def getGuardConst(self):
return set() if self.nodata_flag() else self._guardConsts
def isVariable(self, name):
return name in self._vars
def inflows(self, transition):
return self._transitions.get(transition, {}).get('inflows', [])
def outflows(self, transition):
return self._transitions.get(transition, {}).get('outflows', [])
def updates(self, transition):
if self.nodata_flag():
return {}
elif self.reduced_domain_flag():
reduced_updates = {}
for var, values in self._transitions.get(transition, {}).get('updates', {}).items():
reduced_values = [value for value in values if value in self.getAD()]
if not (len(reduced_values) == 0 and len(values) > 0):
# when all the update values have been removed, the
# variable shouldn't be touched
reduced_updates[var] = reduced_values
return reduced_updates
else:
return self._transitions.get(transition, {}).get('updates', {})
def guard(self, transition):
return None if self.nodata_flag() else self._transitions.get(transition, {}).get('guard', None)
def guardExpr(self, transition, lang='default'):
return None if self.nodata_flag() else getGuardExpr(self._transitions.get(transition, {}), lang)
def guardObj(self, transition):
return self.guards().get(transition, None)
def hasGuard(self, transition):
return False if self.nodata_flag() else transition in self.guards()
def newTransition(self, name, inflows, outflows, guard=None, updates=None):
tobj = {'inflows': inflows, 'outflows': outflows}
if updates:
tobj['updates'] = updates
if guard:
tobj['guard'] = guard
self._transitions[name] = tobj
# pprint.PrettyPrinter(indent=2).pprint(tobj)
self._updateTransitionInstance(name, tobj)
return tobj
def embed_trace(dawnet, trace):
class WrongTransition(Exception):
pass
def stepName(step):
return 'tr_stp{}'.format(step)
def trName(name, step):
return 'tr_{}_{}'.format(name, step)
# make a copy of the original DAWNet
ext_dawnet = copy.deepcopy(dawnet)
# See file:trace-schema.json for expected format of a trace
trace_step = 0
for log_item in trace['trace']:
tr_name = log_item['transition']
try:
if tr_name not in ext_dawnet.transitions():
raise WrongTransition('transition {} is not in the model'.format(tr_name))
tr_updates = ext_dawnet.updates(tr_name)
updates = {}
if 'updates' in log_item:
for (varname, varvalue) in log_item['updates'].items():
# if (str(varvalue) in ext_dawnet.updates(tr_name).get('varname', [])):
if (varname in tr_updates and str(varvalue) in tr_updates[varname]):
updates[varname] = [str(varvalue)]
else:
logging.warning('Dropping update {}:{} for transition {}'.format(varname, varvalue, tr_name))
inflows = list(ext_dawnet.inflows(tr_name))
inflows.append(stepName(trace_step))
trace_step += 1
outflows = list(ext_dawnet.outflows(tr_name))
outflows.append(stepName(trace_step))
guard = ext_dawnet.guard(tr_name)
ext_dawnet.newTransition(trName(tr_name, trace_step), inflows, outflows, guard, updates if len(updates.keys()) > 0 else None)
except WrongTransition as e:
logging.error('Dropping transition {}: {}'.format(tr_name, e))
if trace_step > 0:
# Add new start and end places
ext_dawnet.newTransition(trName('start', ''), [trName('pstart', '')], [stepName(0), ext_dawnet.startPlace()])
ext_dawnet.newTransition(trName('end', ''), [stepName(trace_step), ext_dawnet.endPlace()], [trName('pend', '')])
ext_dawnet.updateStartEndPlaces()
return ext_dawnet
|
'''
Assignment 22 :
In cryptography, a Caesar cipher is a very simple encryption technique in which each letter in the plain text is
replaced by a letter some fixed number of positions down the alphabet. For example, with a shift of 3, A would be
replaced by D, B would become E, and so on. The method is named after Julius Caesar, who used it to communicate with
his generals. ROT-13 ("rotate by 13 places") is a widely used example of a Caesar cipher where the shift is 13.
In Python, the key for ROT-13 may be represented by means of the following dictionary:
key = {'a':'n', 'b':'o', 'c':'p', 'd':'q', 'e':'r', 'f':'s', 'g':'t', 'h':'u',
'i':'v', 'j':'w', 'k':'x', 'l':'y', 'm':'z', 'n':'a', 'o':'b', 'p':'c',
'q':'d', 'r':'e', 's':'f', 't':'g', 'u':'h', 'v':'i', 'w':'j', 'x':'k',
'y':'l', 'z':'m', 'A':'N', 'B':'O', 'C':'P', 'D':'Q', 'E':'R', 'F':'S',
'G':'T', 'H':'U', 'I':'V', 'J':'W', 'K':'X', 'L':'Y', 'M':'Z', 'N':'A',
'O':'B', 'P':'C', 'Q':'D', 'R':'E', 'S':'F', 'T':'G', 'U':'H', 'V':'I',
'W':'J', 'X':'K', 'Y':'L', 'Z':'M'}
Your task in this exercise is to implement an encoder/decoder of ROT-13. Once you're done, you will be able to
read the following secret message:
Pnrfne pvcure? V zhpu cersre Pnrfne fnynq!
Note that since English has 26 characters, your ROT-13 program will be able to both encode and decode texts written in
English.
'''
key = {'a':'n', 'b':'o', 'c':'p', 'd':'q', 'e':'r', 'f':'s', 'g':'t', 'h':'u',
'i':'v', 'j':'w', 'k':'x', 'l':'y', 'm':'z', 'n':'a', 'o':'b', 'p':'c',
'q':'d', 'r':'e', 's':'f', 't':'g', 'u':'h', 'v':'i', 'w':'j', 'x':'k',
'y':'l', 'z':'m', 'A':'N', 'B':'O', 'C':'P', 'D':'Q', 'E':'R', 'F':'S',
'G':'T', 'H':'U', 'I':'V', 'J':'W', 'K':'X', 'L':'Y', 'M':'Z', 'N':'A',
'O':'B', 'P':'C', 'Q':'D', 'R':'E', 'S':'F', 'T':'G', 'U':'H', 'V':'I',
'W':'J', 'X':'K', 'Y':'L', 'Z':'M'}
def Encoder(Plain_text):
Cipher_text = ''
for Char in Plain_text:
if Char in (",", "'", ".", " ", "?", "!"):
Cipher_text += Char
else:
Cipher_text += key[Char]
print "From ENCODER:"
return Cipher_text
def Decoder(Cipher_text):
Plain_text = ''
for Char in Cipher_text:
if Char in (",", "'", ".", " ", "?", "!"):
Plain_text += Char
else:
Plain_text += key[Char]
print "From DECODER:"
return Plain_text
Plain_text = raw_input("Enter your plain text for Ciphering:")
Cipher_text = Encoder(Plain_text)
print Cipher_text
Plain_text = Decoder(Cipher_text)
print Plain_text
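# Cross-check (sketch, Python 3): the standard library rot_13 codec applies the same
# mapping, e.g.
#   import codecs
#   codecs.encode("Pnrfne pvcure? V zhpu cersre Pnrfne fnynq!", "rot_13")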
|
import tensorflow as tf
import model
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('testing', '', """ checkpoint file """)
tf.app.flags.DEFINE_string('finetune', '', """ finetune checkpoint file """)
tf.app.flags.DEFINE_integer('batch_size', "5", """ batch_size """)
tf.app.flags.DEFINE_float('learning_rate', "1e-3", """ initial lr """)
tf.app.flags.DEFINE_string('log_dir', "/tmp3/first350/TensorFlow/Logs", """ dir to store ckpt """)
tf.app.flags.DEFINE_string('image_dir', "/tmp3/first350/SegNet-Tutorial/CamVid/train.txt", """ path to CamVid image """)
tf.app.flags.DEFINE_string('test_dir', "/tmp3/first350/SegNet-Tutorial/CamVid/test.txt", """ path to CamVid test image """)
tf.app.flags.DEFINE_string('val_dir', "/tmp3/first350/SegNet-Tutorial/CamVid/val.txt", """ path to CamVid val image """)
tf.app.flags.DEFINE_integer('max_steps', "20000", """ max_steps """)
tf.app.flags.DEFINE_integer('image_h', "360", """ image height """)
tf.app.flags.DEFINE_integer('image_w', "480", """ image width """)
tf.app.flags.DEFINE_integer('image_c', "3", """ image channel (RGB) """)
tf.app.flags.DEFINE_integer('num_class', "11", """ total class number """)
tf.app.flags.DEFINE_boolean('save_image', True, """ whether to save predicted image """)
def checkArgs():
if FLAGS.testing != '':
print('The model is set to Testing')
print("check point file: %s"%FLAGS.testing)
print("CamVid testing dir: %s"%FLAGS.test_dir)
elif FLAGS.finetune != '':
print('The model is set to Finetune from ckpt')
print("check point file: %s"%FLAGS.finetune)
print("CamVid Image dir: %s"%FLAGS.image_dir)
print("CamVid Val dir: %s"%FLAGS.val_dir)
else:
print('The model is set to Training')
print("Max training Iteration: %d"%FLAGS.max_steps)
print("Initial lr: %f"%FLAGS.learning_rate)
print("CamVid Image dir: %s"%FLAGS.image_dir)
print("CamVid Val dir: %s"%FLAGS.val_dir)
print("Batch Size: %d"%FLAGS.batch_size)
print("Log dir: %s"%FLAGS.log_dir)
def main(args):
checkArgs()
if FLAGS.testing:
model.test(FLAGS)
elif FLAGS.finetune:
model.training(FLAGS, is_finetune=True)
else:
model.training(FLAGS, is_finetune=False)
if __name__ == '__main__':
tf.app.run()
|
from ..filters import FilterByDaterange, FilterByOrder, FilterByStatus, FilterByApplication, FilterByJob
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from saef.models import DatasetSessionMetaData
@login_required()
def dataset_overview(request):
dataset_sessions_metadata = DatasetSessionMetaData.objects.filter()
dataset_sessions_metadata_count = dataset_sessions_metadata.count()
filter_by_status = FilterByStatus(request)
dataset_sessions_metadata = filter_by_status.filter(dataset_sessions_metadata)
filter_by_daterange = FilterByDaterange(request, 'dataset')
dataset_sessions_metadata = filter_by_daterange.filter(dataset_sessions_metadata)
filter_order_by = FilterByOrder(request, 'dataset',
application_order='dataset_session__job_session__application_session',
job_order='dataset_session__job_session')
dataset_sessions_metadata = filter_order_by.filter(dataset_sessions_metadata)
filter_by_application = FilterByApplication(request, 'dataset_session__job_session__application_session')
dataset_sessions_metadata = filter_by_application.filter(dataset_sessions_metadata)
filter_by_job = FilterByJob(request, filter_by_application.selected)
dataset_sessions_metadata = filter_by_job.filter(dataset_sessions_metadata)
response_data = {'dataset_sessions_metadata': dataset_sessions_metadata,
'dataset_sessions_metadata_count': dataset_sessions_metadata_count,
'status_options': filter_by_status.options,
'status_selected': filter_by_status.selected,
'date_options': filter_by_daterange.options,
'date_selected': filter_by_daterange.selected,
'application_options': filter_by_application.options,
'application_selected': filter_by_application.selected,
'job_options': filter_by_job.options,
'job_selected': filter_by_job.selected}
return render(request, 'dataset_overview/dataset_overview.html', response_data) |
from datetime import datetime, timezone
from freezegun import freeze_time
from app.views.handlers.feedback import FeedbackMetadata, FeedbackPayload
from .conftest import (
case_id,
case_ref,
case_type,
channel,
collection_exercise_sid,
data_version,
display_address,
feedback_count,
feedback_text,
feedback_type,
form_type,
language_code,
period_id,
ref_p_end_date,
ref_p_start_date,
region_code,
ru_ref,
schema_name,
started_at,
survey_id,
tx_id,
user_id,
)
@freeze_time(datetime.now(tz=timezone.utc).isoformat())
def test_feedback_payload(
session_data_feedback, schema_feedback, metadata, response_metadata
):
feedback_payload = FeedbackPayload(
metadata=metadata,
response_metadata=response_metadata,
schema=schema_feedback,
case_id=case_id,
submission_language_code=language_code,
feedback_count=session_data_feedback.feedback_count,
feedback_text=feedback_text,
feedback_type=feedback_type,
)
expected_payload = {
"collection": {
"exercise_sid": collection_exercise_sid,
"period": period_id,
"schema_name": schema_name,
},
"data": {
"feedback_count": str(feedback_count),
"feedback_text": feedback_text,
"feedback_type": feedback_type,
},
"form_type": form_type,
"launch_language_code": "en",
"metadata": {
"display_address": display_address,
"ref_period_end_date": ref_p_end_date,
"ref_period_start_date": ref_p_start_date,
"ru_ref": ru_ref,
"user_id": user_id,
},
"origin": "uk.gov.ons.edc.eq",
"case_id": case_id,
"started_at": started_at,
"submitted_at": datetime.now(tz=timezone.utc).isoformat(),
"flushed": False,
"survey_id": survey_id,
"submission_language_code": language_code,
"tx_id": tx_id,
"type": "uk.gov.ons.edc.eq:feedback",
"version": data_version,
"case_type": case_type,
"channel": channel,
"region_code": region_code,
"case_ref": case_ref,
}
assert expected_payload == feedback_payload()
def test_feedback_metadata():
feedback_metadata = FeedbackMetadata(case_id, tx_id)
expected_metadata = {
"case_id": case_id,
"tx_id": tx_id,
}
assert feedback_metadata() == expected_metadata
|
class TakoException(Exception):
pass
class TaskFailed(TakoException):
pass
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A shared library to validate 'gcloud test' CLI argument values."""
import re
import sys
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core import log
class InvalidArgException(exceptions.InvalidArgumentException):
"""InvalidArgException is for malformed gcloud test argument values.
It provides a wrapper around Calliope's InvalidArgumentException that
conveniently converts internal arg names with underscores into the external
arg names with hyphens.
"""
def __init__(self, param_name, message):
super(InvalidArgException, self).__init__(ExternalArgNameFrom(param_name),
message)
def ValidateArgFromFile(arg_internal_name, arg_value):
"""Do checks/mutations on arg values parsed from YAML which need validation.
Any arg not appearing in the _FILE_ARG_VALIDATORS dictionary is assumed to be
a simple string to be validated by the default _ValidateString() function.
Mutations of the args are done in limited cases to improve ease-of-use.
This includes:
1) The YAML parser automatically converts attribute values into numeric types
where possible. The os-version-ids for Android devices happen to be integers,
but the Testing service expects them to be strings, so we automatically
convert them to strings so users don't have to quote each one.
2) The include: keyword, plus all test args that normally expect lists (e.g.
device-ids, os-version-ids, locales, orientations...), will also accept a
single value which is not specified using YAML list notation (e.g. not enclosed
in []). Such single values are automatically converted into a list containing
one element.
Args:
arg_internal_name: the internal form of the arg name.
arg_value: the argument's value as parsed from the yaml file.
Returns:
The validated argument value.
Raises:
InvalidArgException: If the arg value is missing or is not valid.
"""
if arg_value is None:
raise InvalidArgException(arg_internal_name, 'no argument value found.')
if arg_internal_name in _FILE_ARG_VALIDATORS:
return _FILE_ARG_VALIDATORS[arg_internal_name](arg_internal_name, arg_value)
return _ValidateString(arg_internal_name, arg_value)
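# Example (illustrative): a YAML entry such as "os-version-ids: 21" reaches this function
# as the int 21 under the internal name 'os_version_ids'; the _FILE_ARG_VALIDATORS table
# below routes it to ValidateStringList, which converts it to the list ['21'].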
# Constants shared between arg-file validation and CLI flag validation.
POSITIVE_INT_PARSER = arg_parsers.BoundedInt(1, sys.maxint)
NONNEGATIVE_INT_PARSER = arg_parsers.BoundedInt(0, sys.maxint)
TIMEOUT_PARSER = arg_parsers.Duration(lower_bound='1m', upper_bound='6h')
ORIENTATION_LIST = ['portrait', 'landscape']
def ValidateStringList(arg_internal_name, arg_value):
"""Validates an arg whose value should be a list of strings.
Args:
arg_internal_name: the internal form of the arg name.
arg_value: the argument's value parsed from yaml file.
Returns:
The validated argument value.
Raises:
InvalidArgException: the argument's value is not valid.
"""
if isinstance(arg_value, basestring): # convert single str to a str list
return [arg_value]
if isinstance(arg_value, int): # convert single int to a str list
return [str(arg_value)]
if isinstance(arg_value, list):
return [_ValidateString(arg_internal_name, value) for value in arg_value]
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateString(arg_internal_name, arg_value):
"""Validates an arg whose value should be a simple string."""
if isinstance(arg_value, basestring):
return arg_value
if isinstance(arg_value, int): # convert int->str if str is really expected
return str(arg_value)
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateBool(arg_internal_name, arg_value):
"""Validates an argument which should have a boolean value."""
# Note: the python yaml parser automatically does string->bool conversion for
# true/True/TRUE/false/False/FALSE and also for variations of on/off/yes/no.
if isinstance(arg_value, bool):
return arg_value
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateDuration(arg_internal_name, arg_value):
"""Validates an argument which should have a Duration value."""
try:
if isinstance(arg_value, basestring):
return TIMEOUT_PARSER(arg_value)
elif isinstance(arg_value, int):
return TIMEOUT_PARSER(str(arg_value))
except arg_parsers.ArgumentTypeError as e:
raise InvalidArgException(arg_internal_name, e.message)
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateInteger(arg_internal_name, arg_value):
"""Validates an argument which should have any integer value."""
if isinstance(arg_value, int):
return arg_value
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidatePositiveInteger(arg_internal_name, arg_value):
"""Validates an argument which should be an integer > 0."""
try:
if isinstance(arg_value, int):
return POSITIVE_INT_PARSER(str(arg_value))
except arg_parsers.ArgumentTypeError as e:
raise InvalidArgException(arg_internal_name, e.message)
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateNonNegativeInteger(arg_internal_name, arg_value):
"""Validates an argument which should be an integer >= 0."""
try:
if isinstance(arg_value, int):
return NONNEGATIVE_INT_PARSER(str(arg_value))
except arg_parsers.ArgumentTypeError as e:
raise InvalidArgException(arg_internal_name, e.message)
raise InvalidArgException(arg_internal_name, arg_value)
def _ValidateOrientationList(arg_internal_name, arg_value):
"""Validates that 'orientations' only contains 'portrait' and 'landscape'."""
arg_value = ValidateStringList(arg_internal_name, arg_value)
for orientation in arg_value:
if orientation not in ORIENTATION_LIST:
raise InvalidArgException(arg_internal_name, orientation)
if len(arg_value) != len(set(arg_value)):
raise InvalidArgException(arg_internal_name,
'orientations may not be repeated.')
return arg_value
def _ValidateObbFileList(arg_internal_name, arg_value):
"""Validates that 'obb-files' contains at most 2 entries."""
arg_value = ValidateStringList(arg_internal_name, arg_value)
if len(arg_value) > 2:
raise InvalidArgException(arg_internal_name,
'At most two OBB files may be specified.')
return arg_value
def _ValidateKeyValueStringPairs(arg_internal_name, arg_value):
"""Validates that an argument is a dict of string-type key-value pairs."""
if isinstance(arg_value, dict):
new_dict = {}
# Cannot use dict comprehension since it's not supported in Python 2.6.
for (key, value) in arg_value.items():
new_dict[str(key)] = _ValidateString(arg_internal_name, value)
return new_dict
else:
raise InvalidArgException(arg_internal_name, 'Malformed key-value pairs.')
# Map of internal arg names to their appropriate validation functions.
# Any arg not appearing in this map is assumed to be a simple string.
_FILE_ARG_VALIDATORS = {
'async': _ValidateBool,
'auto_google_login': _ValidateBool,
'timeout': _ValidateDuration,
'device_ids': ValidateStringList,
'os_version_ids': ValidateStringList,
'locales': ValidateStringList,
'orientations': _ValidateOrientationList,
'obb_files': _ValidateObbFileList,
'test_targets': ValidateStringList,
'event_count': _ValidatePositiveInteger,
'event_delay': _ValidateNonNegativeInteger,
'random_seed': _ValidateInteger,
'max_steps': _ValidateNonNegativeInteger,
'max_depth': _ValidatePositiveInteger,
'robo_directives': _ValidateKeyValueStringPairs,
'environment_variables': _ValidateKeyValueStringPairs,
'directories_to_pull': ValidateStringList,
}
def InternalArgNameFrom(arg_external_name):
"""Converts a user-visible arg name into its corresponding internal name."""
return arg_external_name.replace('-', '_')
def ExternalArgNameFrom(arg_internal_name):
"""Converts an internal arg name into its corresponding user-visible name."""
return arg_internal_name.replace('_', '-')
# Validation methods below this point are meant to be used on args regardless
# of whether they came from the command-line or an arg-file, while the methods
# above here are only for arg-file args, which bypass the standard validations
# performed by the argparse package (which only works with CLI args).
def ValidateArgsForTestType(
args, test_type, type_rules, shared_rules, all_test_args_set):
"""Raise errors if required args are missing or invalid args are present.
Args:
args: an argparse.Namespace object which contains attributes for all the
arguments that were provided to the command invocation (i.e. command
group and command arguments combined).
test_type: string containing the type of test to run.
type_rules: a nested dictionary defining the required and optional args
per type of test, plus any default values.
shared_rules: a nested dictionary defining the required and optional args
shared among all test types, plus any default values.
all_test_args_set: a set of strings for every gcloud-test argument to use
for validation.
Raises:
InvalidArgException: If an arg doesn't pair with the test type.
RequiredArgumentException: If a required arg for the test type is missing.
"""
required_args = type_rules[test_type]['required'] + shared_rules['required']
optional_args = type_rules[test_type]['optional'] + shared_rules['optional']
allowable_args_for_type = required_args + optional_args
# Raise an error if an optional test arg is not allowed with this test_type.
for arg in all_test_args_set:
if getattr(args, arg, None) is not None: # Ignore args equal to None
if arg not in allowable_args_for_type:
raise InvalidArgException(
arg, "may not be used with test type '{0}'.".format(test_type))
# Raise an error if a required test arg is missing or equal to None.
for arg in required_args:
if getattr(args, arg, None) is None:
raise exceptions.RequiredArgumentException(
'{0}'.format(ExternalArgNameFrom(arg)),
"must be specified with test type '{0}'.".format(test_type))
def ValidateResultsBucket(args):
"""Do some basic sanity checks on the format of the results-bucket arg."""
if args.results_bucket is None:
return
# TODO(user): use the resources module here once it understands gs:// links
if args.results_bucket.startswith('gs://'):
args.results_bucket = args.results_bucket[5:]
args.results_bucket = args.results_bucket.rstrip('/')
if '/' in args.results_bucket:
raise exceptions.InvalidArgumentException(
'results-bucket', 'Results bucket name is not valid')
def ValidateOsVersions(args, catalog):
"""Validate os-version-ids strings against the TestingEnvironmentCatalog.
Also allow users to alternatively specify OS version strings (e.g. '5.1.x')
but translate them here to their corresponding version IDs (e.g. '22').
The final list of validated version IDs is sorted in ascending order.
Args:
args: an argparse namespace. All the arguments that were provided to the
command invocation (i.e. group and command arguments combined).
catalog: the TestingEnvironmentCatalog which includes all valid OS versions
accepted by the Testing service.
"""
validated_versions = set() # Using a set will remove duplicates
version_ids = [v.id for v in catalog.versions]
# TODO(user): use dict comprehensions if py2.6 compatibility is dropped.
# version_to_id_map = {v.versionString: v.id for v in catalog.versions}
version_to_id_map = dict((v.versionString, v.id) for v in catalog.versions)
for vers in args.os_version_ids:
if vers in version_ids:
validated_versions.add(vers)
else:
version_id = version_to_id_map.get(vers, None)
if version_id is None:
raise exceptions.InvalidArgumentException('os-version-ids', vers)
validated_versions.add(version_id)
args.os_version_ids = sorted(validated_versions)
log.info('Testing against OS versions: {0}'.format(args.os_version_ids))
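# For example (illustrative): with a catalog that maps version string '5.1.x' to id '22',
# an input of ['5.1.x', '21'] would be validated and sorted to ['21', '22'].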
_OBB_FILE_REGEX = re.compile(
r'(.*[\\/:])?(main|patch)\.\d+(\.[a-zA-Z]\w*)+\.obb$')
def ValidateObbFileNames(obb_files):
"""Confirm that any OBB file names follow the required Android pattern."""
for obb_file in (obb_files or []):
if not _OBB_FILE_REGEX.match(obb_file):
raise InvalidArgException(
'obb_files',
'[{0}] is not a valid OBB file name, which must have the format: '
'(main|patch).<versionCode>.<package.name>.obb'.format(obb_file))
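# For example, 'main.0300110.com.example.android.obb' matches the required pattern,
# while 'extra.1.com.example.obb' (wrong prefix) would be rejected.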
def ValidateRoboDirectivesList(args):
"""Validates key-value pairs for 'robo_directives' flag."""
for key in (args.robo_directives or []):
# Check for illegal characters in the key (resource name).
if ':' in key:
raise InvalidArgException(
'robo_directives',
'Invalid character ":" in resource name "{0}"'.format(key))
_ENVIRONMENT_VARIABLE_REGEX = re.compile(r'^[a-zA-Z]\w+$')
def ValidateEnvironmentVariablesList(args):
"""Validates key-value pairs for 'environment-variables' flag."""
for key in (args.environment_variables or []):
# Check for illegal characters in the key.
if not _ENVIRONMENT_VARIABLE_REGEX.match(key):
raise InvalidArgException(
'environment_variables',
'Invalid environment variable "{0}"'.format(key))
_DIRECTORIES_TO_PULL_PATH_REGEX = re.compile(r'^(/.*)+/?$')
def ValidateDirectoriesToPullList(args):
"""Validates list of file paths for 'directories-to-pull' flag."""
for file_path in (args.directories_to_pull or []):
# Check for correct file path format.
if not _DIRECTORIES_TO_PULL_PATH_REGEX.match(file_path):
raise InvalidArgException('directories_to_pull',
'Invalid path "{0}"'.format(file_path))
|
#!/usr/bin/python
import sys
for line in sys.stdin:
data=line.split(" ")
if(len(data)>=6):  # need at least 6 fields so that data[5] exists
reqs=data[5].split("\"")
if len(reqs)>1:
print reqs[1]
|
import pytest
from typeguard import typechecked
from wyrd.constrained_types import ConstrainedInt, add_constraint
@add_constraint(lambda x: x > 0, "must be at least 1")
class Quantity(ConstrainedInt):
pass
@typechecked
def total_quantity(a: Quantity, b: Quantity) -> Quantity:
return Quantity(a + b)
def test_everything_is_fine_when_the_types_are_correct():
assert total_quantity(Quantity(1), Quantity(2)) == Quantity(3)
def test_error_raised_for_incorrect_types():
with pytest.raises(TypeError):
total_quantity(Quantity(1), 2) # type: ignore
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
# In[2]:
#import data
data = pd.read_csv("D:\dataset_FLD.csv")
# In[3]:
positive = data.loc[data['y']==1]
# In[4]:
negative = data.loc[data['y']==0]
# In[5]:
positive = positive.iloc[:,0:3].values
negative = negative.iloc[:,0:3].values
# In[6]:
#calculate means in original space
M1 = np.mean(positive, axis = 0)
M2 = np.mean(negative, axis = 0)
# In[7]:
print("M1 =",M1)
print("M2 =",M2)
# In[8]:
#Calculating Sw
res1 = np.zeros([3,3])
for i in range(len(positive)):
pos = positive[i]-M1
pos.shape = (1,3)
posT = np.transpose(pos)
ans1 = np.dot(posT,pos)
res1 = np.add(res1,ans1)
# In[9]:
res2 = np.zeros([3,3])
for i in range(len(negative)):
neg = negative[i]-M2
neg.shape = (1,3)
negT = np.transpose(neg)
ans2 = np.dot(negT,neg)
res2 = np.add(res2,ans2)
# In[10]:
res1 = res1/len(positive)
res2 = res2/len(negative)
# In[11]:
Sw = res1+res2
print("Sw is\n",Sw)
# In[12]:
#Calculating Sw inverse
sw_inv = np.linalg.inv(Sw)
sw_inv
# In[13]:
#Finding the vector w and normalising it
w = np.dot(sw_inv,np.transpose(M2-M1))
print("w is",w)
# In[14]:
import math
mag = math.sqrt(w[0]*w[0]+w[1]*w[1]+w[2]*w[2])
# In[15]:
w = w/mag
print("normalised w is",w)
# In[16]:
#Finding projections of positive and negative points on unit vector w
positive_projections = np.zeros([len(positive)])
for i in range(len(positive)):
positive_projections[i] = np.dot(w,np.transpose(positive[i]))
# In[17]:
negative_projections = np.zeros([len(negative)])
for i in range(len(negative)):
negative_projections[i] = np.dot(w,np.transpose(negative[i]))
# In[18]:
#Plotting reduced clusters of positive and negative data sets
import matplotlib.pyplot as plt
plt.axes((-2.5,-0.05,3,1))
plt.plot(positive_projections,np.zeros(len(positive_projections)),'r+')
plt.plot(negative_projections,np.zeros(len(negative_projections)),'b-')
plt.show()
# In[19]:
#Fitting the reduced clusters into Gauss Normal Distributions
mu_p = np.mean(positive_projections)
std_p = np.std(positive_projections)
mu_n = np.mean(negative_projections)
std_n = np.std(negative_projections)
# In[20]:
#Sorting projection data to plot the normal distributions
positive_projections.sort()
negative_projections.sort()
# In[21]:
print("pos_nd_mean =",mu_p,"and pos_nd_std =",std_p)
print("neg_nd_mean =",mu_n,"and neg_nd_std =",std_n)
# In[22]:
import matplotlib.pyplot as plt
# In[23]:
#Solving the quadratic equations of ND1 and ND2 to find the intersection points
def findThreshold(mu1,mu2,std1,std2):
p = 1/(2*std1**2) - 1/(2*std2**2)
q = mu2/(std2**2) - mu1/(std1**2)
r = mu1**2 /(2*std1**2) - mu2**2 / (2*std2**2) - np.log(std2/std1)
return np.roots([p,q,r])
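# Derivation (for reference): equating the two normal densities and taking logs gives
#   x^2*(1/(2*std1^2) - 1/(2*std2^2)) + x*(mu2/std2^2 - mu1/std1^2)
#     + mu1^2/(2*std1^2) - mu2^2/(2*std2^2) - ln(std2/std1) = 0,
# i.e. the quadratic p*x^2 + q*x + r = 0 whose roots np.roots returns above.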
# In[24]:
#Finding the threshold point which should be between the means of ND1 and ND2
intersection = findThreshold(mu_p,mu_n,std_p,std_n)
threshold = 0
for i in range(len(intersection)):
if (mu_p < intersection[i]) and (mu_n > intersection[i]):
threshold = intersection[i]
break
print("Threshold is",threshold)
# In[25]:
#Plotting the normal distributions and the discriminant line in 1D
from scipy.stats import norm
y1 = norm.pdf(positive_projections,mu_p,std_p)
plt.plot(positive_projections,y1,color='red')
y2 = norm.pdf(negative_projections,mu_n,std_n)
plt.plot(negative_projections,y2,color='blue')
plt.plot(threshold,0,marker = 'o',markersize=3,color='green')
plt.axvline(x=threshold, color='green')
# In[26]:
#Calculating accuracy
correct_pclass = 0
for i in range(len(positive)):
if np.dot(w,np.transpose(positive[i])) < threshold:
correct_pclass += 1
correct_nclass = 0
for i in range(len(negative)):
if np.dot(w,np.transpose(negative[i])) > threshold:
correct_nclass += 1
accuracy = (correct_pclass + correct_nclass)/(len(positive) + len(negative))
print("Accuracy is",accuracy)
# In[27]:
#Plotting higher dimensional data and the discriminant in original space
Xp = positive[:,0]
Yp = positive[:,1]
Zp = positive[:,2]
# In[28]:
Xn = negative[:,0]
Yn = negative[:,1]
Zn = negative[:,2]
# In[29]:
import matplotlib.pyplot as mpp
mpp.rcParams['figure.figsize'] = (8,8)
mpp.rcParams['figure.dpi'] = 100
# In[30]:
fig = plt.figure()
ax = fig.add_subplot(111,projection = '3d')
ax.scatter(Xp,Yp,Zp,c = 'r',marker='.')
ax.scatter(Xn,Yn,Zn,c = 'b',marker='.')
a = w[0]
b = w[1]
c = w[2]
xx = np.linspace(-10,10,100)
yy = np.linspace(-10,10,100)
xx, yy = np.meshgrid(xx, yy)
z = (threshold - a*xx - b*yy)/c  # discriminant plane w.x = threshold in the original space
ax.plot_surface(xx, yy, z, alpha=0.4,color = 'green')
ax.set_xlabel('x-axis')
ax.set_ylabel('y-axis')
ax.set_zlabel('z-axis')
plt.show()
# In[31]:
#Printing Final results
print("Unit vector w :",w)
print("Threshold in 1D =",threshold)
print("Accuracy :",accuracy)
|
import myLib.mySSH as ssh
hostname = 'ev3dev'
port = 22
username = 'robot'
password = 'maker'
client = ssh.connectionSSH(hostname, port, username, password)
isConnected = True
try:
while isConnected:
command = input('Enter command here \n')
command = command.replace('\r', '')
if (command == 'a'):
isConnected = False
elif (command == 'z'):
ssh.commandSSH(client, './forward.sh')
elif (command == 's'):
ssh.commandSSH(client, './backward.sh')
elif (command == 'q'):
ssh.commandSSH(client, './left.sh')
elif (command == 'd'):
ssh.commandSSH(client, './right.sh')
elif (command == 'e'):
ssh.commandSSH(client, './hold.sh')
elif (command == 'r'):
ssh.commandSSH(client, './release.sh')
else:
print ('Command not found')
finally:
ssh.closeSSH(client) |
#!/usr/bin/env python3
import sympy
import numpy as np
from scipy.constants import physical_constants
import scipy.sparse as sp
import scipy.sparse.linalg as sla
from types import SimpleNamespace
import kwant
from kwant.continuum import discretize, sympify
import kwant.linalg.mumps as mumps
import time
import argparse
# ****************** general constants and globals *******************
c = physical_constants['speed of light in vacuum'][0]
val_hbar = physical_constants['Planck constant over 2 pi in eV s'][0]
val_m0 = physical_constants['electron mass energy equivalent in MeV'][0]
val_m0 = val_m0 / (c*10**9)**2 * 10**6  # electron mass in eV*s**2/nm**2 (c taken in nm/s)
val_mu_B = physical_constants['Bohr magneton in eV/T'][0]
val_phi_0 = physical_constants['mag. flux quantum'][0] * (10**9)**2  # flux quantum in T*nm**2
constants = {
'm_0': val_m0,
'phi_0': val_phi_0,
'mu_B': val_mu_B,
'hbar': val_hbar
}
def eigsh(matrix, k, sigma, **kwargs):
"""Call sla.eigsh with mumps support.
Please see scipy.sparse.linalg.eigsh for documentation.
"""
class LuInv(sla.LinearOperator):
def __init__(self, A):
inst = mumps.MUMPSContext()
#inst.analyze(A, ordering='pord')
inst.factor(A)
self.solve = inst.solve
sla.LinearOperator.__init__(self, A.dtype, A.shape)
def _matvec(self, x):
return self.solve(x.astype(self.dtype))
opinv = LuInv(matrix - sigma * sp.identity(matrix.shape[0]))
return sla.eigsh(matrix, k, sigma=sigma, OPinv=opinv, **kwargs)
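# (Usage sketch: for a sparse Hermitian matrix H, `eigsh(H, k=8, sigma=0)` returns the eight
#  eigenpairs closest to zero, with the MUMPS-factorised shift-invert operator above standing
#  in for scipy's default sparse LU solve.)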
def get_hexagon(width):
def hexagon(pos):
a0 = 0.25*width
b0 = 0.5*np.sin(np.pi/3.0)*width
(x, y) = pos
        return (y > -b0 and y < b0 and
y > -(b0/a0) * (2*a0 - x) and
y < -(b0/a0) * (x - 2*a0) and
y < (b0/a0) * (x + 2*a0) and
y > -(b0/a0) * (x + 2*a0))
return hexagon
def make_syst(a, width):
kx, ky, kz = kwant.continuum.momentum_operators
ham_rashba = kwant.continuum.sympify('''
B*g*mu_B/2*sigma_z+(hbar**2/(2*m_0)*(k_x/m*k_x+k_y/m*k_y+k_z/m*k_z)-V(x, y))*eye(2)+
alpha_z*(k_x*sigma_y-k_y*sigma_x)+alpha_y*(k_z*sigma_x-k_x*sigma_z)+alpha_x*(k_y*sigma_z-k_z*sigma_y)
''')
modified = ['V(x, y)']
ham_rashba = ham_rashba.subs({ kx : kwant.continuum.sympify('k_x-pi/phi_0*B*y')})
ham1 = ham_rashba
ham2 = - ham_rashba.conjugate().subs({s: -s for s in [kx, ky, kz]})
hamiltonian = sympy.Matrix(sympy.BlockDiagMatrix(ham1, ham2))
hamiltonian[0, 3] = hamiltonian[3, 0] = + kwant.continuum.sympify('Delta_SC')
hamiltonian[1, 2] = hamiltonian[2, 1] = - kwant.continuum.sympify('Delta_SC')
subs = {s.conjugate(): s for s in hamiltonian.atoms(sympy.Symbol)}
subs.update({kwant.continuum.sympify(m).conjugate() : kwant.continuum.sympify(m) for m in modified})
hamiltonian = hamiltonian.subs(subs)
template = discretize(hamiltonian, ('x', 'y'), grid_spacing=a)
wire = kwant.Builder()
shape = get_hexagon(width)
wire.fill(template, lambda s: shape([s.pos[0], s.pos[1]]), (0, 0))
return wire.finalized()
def Hk(syst, **kwargs):
pars = dict(
k_z=0.,
B=0.5,
m=0.026,
alpha_z=0.03,
alpha_x=-0.005,
alpha_y=-0.01,
g=-15.,
V=lambda x, y: 0.05,
Delta_SC=0.2e-3,
)
pars.update(constants)
pars.update(kwargs)
return sp.csc_matrix(syst.hamiltonian_submatrix(params=pars, sparse=True))
parser = argparse.ArgumentParser()
parser.add_argument("--latc", type=float, default=0.1)
args = parser.parse_args()
syst = make_syst(args.latc, width=100)
print('Construction time: ', end='')
t_start = time.time()
H = Hk(syst, k_z=0.1)
print(time.time() - t_start)
print('Hamiltonian size: ', H.shape[0])
print('Nonzero elements: ', H.nnz)
print('Diagonalization time with mumps: ', end='')
t_start = time.time()
E, V = eigsh(H, sigma=0, k=8, tol=1e-6)
print(time.time() - t_start)
print('Diagonalization time without mumps: ', end='')
t_start = time.time()
E, V = sla.eigsh(H, sigma=0, k=8, tol=1e-6)
print(time.time() - t_start)
E_max = np.max(E) + 1e-10
print('Diagonalization time with FEAST: ', end='')
import feast
t_start = time.time()
x = feast.zfeast_hcsrev(H, Emin=-E_max, Emax=E_max, k=10)
print(time.time() - t_start)
|
import pandas as pd
import config
# Load data dictionary
xls = pd.ExcelFile(config.DATA_DICTIONARY)
# Subset the Excel by selecting relevant columns
df = pd.read_excel(xls, "Data Dictionary (stewards)")[["Name of the ERN",
"Name of the grouping item",
"Name of the Data Element",
"Explanatory description",
"Data type or list of permitted values",
"Example value",
"Comments"]]
# Subset raw data by choosing the first 100 rows and one column "Name of the Data Element"
df_test = df[["Name of the Data Element"]][0:100]
print(df_test)
# Store data as a pickle file (to avoid re-running this script for each test)
df_test.to_pickle("Data/my_test_df.pickle")
|
def rev_str(x):
    # Build the reversed string by walking the characters back to front
    b = [x[i] for i in range(len(x) - 1, -1, -1)]
    y = "".join(b)
    print(y)
    return y
resp = 'y'
while resp == "y":
x = input("Enter string: ")
y = rev_str(x)
resp = input("Conti? (y/n): ").lower() |
'''
Messages to be received by sum nodes
'''
def increment(amount):
'''
:param int amount: An integer amount by which to increment.
'''
return {'type': 'increment', 'amount': amount}
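# Example (illustration): increment(3) returns {'type': 'increment', 'amount': 3};
# a sum node receiving this message would presumably add 3 to its running total.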
|
"""
03: FOOOF Algorithm
===================
A step-by-step overview of the FOOOF algorithm.
Note that this notebook is for demonstrative purposes, and does not
represent recommended usage of the FOOOF module.
"""
###################################################################################################
# Algorithmic Description
# -----------------------
#
# Briefly, the algorithm proceeds as follows:
#
# - An initial fit of the aperiodic signal is taken across the power spectrum
# - This aperiodic fit is subtracted from the power spectrum, creating a flattened spectrum
# - Peaks are iteratively found in this flattened spectrum
# - A full peak fit is created of all peak candidates found
# - The peak fit is subtracted from the original power spectrum,
# creating a peak-removed power spectrum
# - A final fit of the aperiodic signal is taken of the peak-removed power spectrum
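#
# As an aside, here is a minimal, self-contained sketch of that loop. It is an illustration
# only, assuming a straight-line aperiodic fit in log-log space and plain scipy Gaussian fits;
# the real FOOOF implementation is considerably more careful than this.

import numpy as np
from scipy.optimize import curve_fit

def _gauss(xs, ctr, hgt, wid):
    """Gaussian curve, used only for this illustrative sketch."""
    return hgt * np.exp(-(xs - ctr)**2 / (2 * wid**2))

def sketch_fooof(freqs, powers, max_n_peaks=2, min_peak_height=0.1):
    """Toy version of the loop described above; not the FOOOF implementation."""
    log_f, log_p = np.log10(freqs), np.log10(powers)
    # Initial aperiodic fit: a straight line in log-log space
    slope, offset = np.polyfit(log_f, log_p, 1)
    flat = log_p - (offset + slope * log_f)
    peaks = []
    for _ in range(max_n_peaks):
        # Take the maximum of the flattened spectrum as the next peak guess
        idx = np.argmax(flat)
        if flat[idx] < min_peak_height:
            break
        guess = (freqs[idx], flat[idx], 1.0)
        params, _ = curve_fit(_gauss, freqs, flat, p0=guess, maxfev=5000)
        peaks.append(params)
        # Subtract the fitted Gaussian and continue with the new flattened spectrum
        flat = flat - _gauss(freqs, *params)
    # Final aperiodic fit on the peak-removed (log) spectrum
    peak_fit = np.sum([_gauss(freqs, *p) for p in peaks], axis=0) if peaks else 0.0
    slope, offset = np.polyfit(log_f, log_p - peak_fit, 1)
    return (offset, slope), peaks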
###################################################################################################
# General imports
import numpy as np
import matplotlib.pyplot as plt
# Import the FOOOF object
from fooof import FOOOF
# Import a function to generate synthetic power spectra
from fooof.synth.gen import gen_aperiodic
# Import some internal functions from FOOOF
# Note that these are used here for demonstration only - you do not need to import them to run FOOOF
from fooof.core.funcs import gaussian_function
from fooof.plts.spectra import plot_spectrum
from fooof.plts.fm import plot_peak_iter
###################################################################################################
# Set whether to plot in log-log space (used across the whole notebook)
plt_log = False
###################################################################################################
# Load example data
freqs = np.load('./dat/freqs_2.npy')
spectrum = np.load('./dat/spectrum_2.npy')
###################################################################################################
# Initialize a FOOOF object, with some settings
fm = FOOOF(peak_width_limits=[1, 8], max_n_peaks=6, min_peak_height=0.15)
###################################################################################################
#
# Note that data can be added to FOOOF independently of fitting the model.
# You can then plot the input data.
#
###################################################################################################
# Add data to FOOOF object
fm.add_data(freqs, spectrum, [3, 40])
###################################################################################################
# Plot the power spectrum we just loaded
fm.plot(plt_log)
###################################################################################################
#
# The FOOOF object stores most of the intermediate steps internally.
#
# For this notebook, we will first fit the full model, as normal, but then step through,
# and visualize each step the algorithm takes to come to that final fit.
#
# Fit the FOOOF model
fm.fit(freqs, spectrum, [3, 40])
###################################################################################################
# Do an initial aperiodic signal fit - a robust fit that excludes outliers
# This recreates an initial fit that isn't ultimately stored in the FOOOF object
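# (Roughly speaking, such a robust fit can be obtained by fitting once, discarding points that
#  sit well above that first fit, i.e. the likely peaks, and refitting on the remaining points;
#  see the FOOOF internals for the exact procedure.)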
init_ap_fit = gen_aperiodic(fm.freqs, fm._robust_ap_fit(fm.freqs, fm.power_spectrum))
# Plot the initial aperiodic fit
_, ax = plt.subplots(figsize=(12, 10))
plot_spectrum(fm.freqs, fm.power_spectrum, plt_log, label='Original Power Spectrum', ax=ax)
plot_spectrum(fm.freqs, init_ap_fit, plt_log, label='Initial Aperiodic Fit', ax=ax)
###################################################################################################
#
# The initial fit, as above, is used to create a flattened spectrum,
# from which peaks can be extracted.
#
###################################################################################################
# Flatten the power spectrum, by subtracting out the initial aperiodic fit
plot_spectrum(fm.freqs, fm._spectrum_flat, plt_log, label='Flattened Spectrum')
###################################################################################################
#
# With the flattened spectrum, FOOOF then initiates an iterative procedure to find peaks.
#
# For each iteration:
#
# - The maximum point of the flattened spectrum is found.
#
# - If this point fails to pass the relative or absolute height threshold,
# the procedure halts.
# - A Gaussian is fit around this maximum point
# - This 'guess' Gaussian is then subtracted from the flattened spectrum
# - The procedure continues to a new iteration with the new version of the flattened spectrum,
# unless `max_n_peaks` has been reached
#
###################################################################################################
# Plot the iterative approach to finding peaks from the flattened spectrum
plot_peak_iter(fm)
###################################################################################################
#
# Once the iterative procedure has halted, the extracted 'guess' peaks are then re-fit,
# all together, to the flattened spectrum, creating the peak fit.
#
###################################################################################################
# Fit gaussians to all candidate peaks together, and create peak fit
plot_spectrum(fm.freqs, fm._peak_fit, plt_log)
###################################################################################################
#
# The peak component of the fit is now complete. This peak fit is then used to go
# back and get a better aperiodic fit.
#
# To do so, the peak fit is removed from the original power spectrum,
# leaving an 'aperiodic-only' spectrum for re-fitting.
#
###################################################################################################
# Create peak removed power spectrum (by removing peak fit from original spectrum)
plot_spectrum(fm.freqs, fm._spectrum_peak_rm, plt_log, label='Peak Removed Spectrum')
###################################################################################################
# Fit the final aperiodic fit on the peak removed power spectrum
_, ax = plt.subplots(figsize=(12, 10))
plot_spectrum(fm.freqs, fm._spectrum_peak_rm, plt_log, label='Peak Removed Spectrum', ax=ax)
plot_spectrum(fm.freqs, fm._ap_fit, plt_log, label='Final Aperiodic Fit', ax=ax)
###################################################################################################
# The aperiodic fit component of the model is now also complete.
# The two components can now be combined.
#
###################################################################################################
# Recreate the full FOOOF model, by combining the peak and aperiodic fits
plot_spectrum(fm.freqs, fm.fooofed_spectrum_, plt_log, label='Full Model')
###################################################################################################
# The last stage is to calculate the fit error and R^2, and to convert the Gaussian parameters into peak parameters
# These results are part of what are stored, and printed, as the model results
fm.print_results()
###################################################################################################
# Plot the full model fit of the power spectrum
# The final fit (red) and aperiodic fit (blue) are the same as we plotted above
fm.plot(plt_log)
|
__author__ = 'mwn'
import os
import sys
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask, render_template
app = Flask(__name__)
app.config.from_object('config')
handler = RotatingFileHandler('yapki.log', maxBytes=10000, backupCount=1)
handler.setLevel(logging.DEBUG)
app.logger.addHandler(handler)
########################
# Configure Secret Key #
########################
def install_secret_key(app, filename='secret_key'):
"""Configure the SECRET_KEY from a file
in the instance directory.
If the file does not exist, print instructions
to create it from a shell with a random key,
then exit.
"""
filename = os.path.join(app.instance_path, filename)
try:
app.config['SECRET_KEY'] = open(filename, 'rb').read()
except IOError:
print('Error: No secret key. Create it with:')
full_path = os.path.dirname(filename)
if not os.path.isdir(full_path):
print('mkdir -p {filename}'.format(filename=full_path))
print('head -c 24 /dev/urandom > {filename}'.format(filename=filename))
sys.exit(1)
if not app.config['DEBUG']:
install_secret_key(app)
@app.errorhandler(404)
def not_found(error):
return render_template('404.html'), 404
@app.after_request
def after_request(response):
response.headers.add('X-Test', 'This is only test.')
response.headers.add('Access-Control-Allow-Origin', '*') # TODO: set to real origin
return response
from app.web.controller import webBp
app.register_blueprint(webBp)
from app.rest.controller import restBp
app.register_blueprint(restBp)
|
from .config import get_cors
from .controllers import SocketController
from .route_constants import *
def setup_routes(app, sio, game_app, player_ranking_repository):
    socket_controller = SocketController(sio, game_app, player_ranking_repository)
sio.on(CONNECT, socket_controller.on_socket_connected)
sio.on(DISCONNECT, socket_controller.on_socket_disconnected)
sio.on(LOGIN, socket_controller.on_socket_login)
sio.on(PLAYERS, socket_controller.get_players)
sio.on(PLAYERS_WAITING, socket_controller.get_players_in_waiting_room)
sio.on(QUESTION, socket_controller.get_question)
sio.on(SCORE, socket_controller.save_question_score)
sio.on(ADD_UNITS, socket_controller.add_units)
sio.on(UNITS_FROM_SHOP, socket_controller.get_shop_units)
sio.on(GET_UNITS, socket_controller.get_units)
sio.on(GET_GOLD, socket_controller.get_gold)
sio.on(UNITS_READY, socket_controller.units_ready)
sio.on(RANKING, socket_controller.ranking)
cors = get_cors(app)
for route in app.router.routes():
if route.resource.canonical == "/socket.io/":
continue
cors.add(route)
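# For illustration only (hypothetical wiring, not part of this module): setup_routes is meant
# to be called with an aiohttp application and a python-socketio server, roughly as follows.
#
# import socketio
# from aiohttp import web
#
# sio = socketio.AsyncServer(cors_allowed_origins="*")
# app = web.Application()
# sio.attach(app)
# setup_routes(app, sio, game_app, player_ranking_repository)
# web.run_app(app)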
|
# coding: utf-8
"""
"""
from .. import db
from .groups import Group
from .users import User
from .objects import Objects
from .permissions import Permissions
__author__ = 'Florian Rhiem <[email protected]>'
class UserObjectPermissions(db.Model):
__tablename__ = 'user_object_permissions'
object_id = db.Column(db.Integer, db.ForeignKey(Objects.object_id_column), nullable=False)
user_id = db.Column(db.Integer, db.ForeignKey(User.id), nullable=False)
permissions = db.Column(db.Enum(Permissions), nullable=False, default=Permissions.NONE)
__table_args__ = (
db.PrimaryKeyConstraint(object_id, user_id),
{},
)
class GroupObjectPermissions(db.Model):
__tablename__ = 'group_object_permissions'
object_id = db.Column(db.Integer, db.ForeignKey(Objects.object_id_column), nullable=False)
group_id = db.Column(db.Integer, db.ForeignKey(Group.id, ondelete="CASCADE"), nullable=False)
permissions = db.Column(db.Enum(Permissions), nullable=False, default=Permissions.NONE)
__table_args__ = (
db.PrimaryKeyConstraint(object_id, group_id),
{},
)
class ProjectObjectPermissions(db.Model):
__tablename__ = 'project_object_permissions'
object_id = db.Column(db.Integer, db.ForeignKey(Objects.object_id_column), nullable=False)
project_id = db.Column(db.Integer, db.ForeignKey('projects.id', ondelete="CASCADE"), nullable=False)
permissions = db.Column(db.Enum(Permissions), nullable=False, default=Permissions.NONE)
__table_args__ = (
db.PrimaryKeyConstraint(object_id, project_id),
{},
)
class AllUserObjectPermissions(db.Model):
__tablename__ = 'all_user_object_permissions'
object_id = db.Column(db.Integer, db.ForeignKey(Objects.object_id_column), primary_key=True)
permissions = db.Column(db.Enum(Permissions), nullable=False, default=Permissions.NONE)
|
"""Dictionary with auto-expiring values for caching purposes."""
from time import time
from threading import RLock
from collections import OrderedDict
class ExpiringDict(OrderedDict):
"""Dictionary with auto-expiring values for caching purposes."""
def __init__(self, max_len, max_age_seconds, callback=None): # noqa
        super().__init__()
self.max_len = max_len
self.max_age = max_age_seconds
self.callback = callback
self.lock = RLock()
self._safe_keys = lambda: list(self.keys())
def inform(self, key, value, type_of="evicting"):
"""Dispatch callback for ttl items."""
if self.callback is not None:
self.callback(key, value, type_of)
def __contains__(self, key):
"""Return True if the dict has a key, else return False."""
try:
with self.lock:
item = super().__getitem__(key)
if time() - item[1] < self.max_age:
return True
del self[key]
self.inform(key, item[0])
except KeyError:
pass
return False
def __getitem__(self, key, with_age=False):
"""Return the item of the dict.
Raises a KeyError if key is not in the map.
"""
with self.lock:
item = super().__getitem__(key)
item_age = time() - item[1]
if item_age < self.max_age:
if with_age:
return item[0], item_age
return item[0]
del self[key]
self.inform(key, item[0])
# return None
raise KeyError(key)
def __setitem__(self, key, value):
"""Set d[key] to value."""
with self.lock:
if len(self) == self.max_len:
try:
old_key, old_value = self.popitem(last=False)
self.inform(old_key, old_value, type_of="dropping")
except KeyError:
pass
super().__setitem__(key, (value, time()))
def pop(self, key, default=None):
"""Get item from the dict and remove it.
Return default if expired or does not exist. Never raise KeyError.
"""
with self.lock:
try:
item = super().__getitem__(key)
del self[key]
return item[0]
except KeyError:
return default
def ttl(self, key):
"""Return TTL of the `key` (in seconds).
Returns None for non-existent or expired keys.
"""
key_value, key_age = self.get(key, with_age=True) # noqa: E501; pylint: disable=unused-variable
if key_age:
key_ttl = self.max_age - key_age
if key_ttl > 0:
return key_ttl
return None
def get(self, key, default=None, with_age=False):
"""Return the value for key if key is in the dictionary, else default.""" # noqa: E501;
try:
return self.__getitem__(key, with_age)
except KeyError as ex: # noqa
if with_age:
return default, None
return default
def items(self):
"""Return a copy of the dictionary's list of (key, value) pairs."""
# res_list = []
for key in self._safe_keys():
try:
yield (key, self[key])
# res_list.append((key, self[key]))
except KeyError:
pass
# return res_list
def values(self):
"""Return a copy of the dictionary's list of values.
See the note for dict.items().
"""
# res_list = []
for key in self._safe_keys():
try:
yield self[key]
# res_list.append(self[key])
except KeyError:
pass
# return res_list
def fromkeys(self):
"""Create a new dictionary with keys from seq and values set to value.""" # noqa: E501;
raise NotImplementedError()
def iteritems(self):
"""Return an iterator over the dictionary's (key, value) pairs."""
raise NotImplementedError()
def itervalues(self):
"""Return an iterator over the dictionary's values."""
raise NotImplementedError()
def viewitems(self):
"""Return a new view of the dictionary's items ((key, value) pairs)."""
raise NotImplementedError()
def viewkeys(self):
"""Return a new view of the dictionary's keys."""
raise NotImplementedError()
def viewvalues(self):
"""Return a new view of the dictionary's values."""
raise NotImplementedError()
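if __name__ == "__main__":
    # Minimal usage sketch (illustration only, not part of the module): keep at most three
    # items, each valid for two seconds, and log evictions through the callback.
    cache = ExpiringDict(max_len=3, max_age_seconds=2,
                         callback=lambda key, value, why: print(why, key, value))
    cache["a"] = 1
    print("a" in cache)              # True while the entry is fresh
    print(cache.ttl("a"))            # remaining lifetime in seconds
    print(cache.get("missing", 0))   # 0, without raising KeyError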
|
class Solution:
    def integerBreak(self, n: int) -> int:
        # dp[i] = best product obtainable by splitting i into at least two positive integers
        dp = [0] * (n + 1)
        dp[1] = 1  # convenience base case: 1 cannot be split further
        for i in range(2, n + 1):
            # Try every split i = d + (i - d); each part may stay whole or be split further,
            # hence max(d, dp[d]). By symmetry it is enough to scan d from i - 1 down to i // 2.
            for d in range(i - 1, i // 2 - 1, -1):
                dp[i] = max(dp[i], max(d, dp[d]) * max(i - d, dp[i - d]))
        return dp[n]
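if __name__ == "__main__":
    # Quick check of the recurrence (illustration): 10 = 3 + 3 + 4 gives the maximal product 36.
    assert Solution().integerBreak(10) == 36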
|
# Copyright 2015-2017 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from behave import then
from bravado import exception as bexception
from paasta_tools.utils import decompose_job_id
@then('instance GET should return app_count "{app_count}" and an expected number of running instances for "{job_id}"')
def service_instance_status(context, app_count, job_id):
(service, instance, _, __) = decompose_job_id(job_id)
response = context.paasta_api_client.service.status_instance(
instance=instance,
service=service,
).result()
assert response['marathon']['app_count'] == int(app_count), response
@then('instance GET should return chronos desired_state "{desired_state}" for "{job_id}"')
def chronos_service_instance_status(context, desired_state, job_id):
(service, instance, _, __) = decompose_job_id(job_id)
response = context.paasta_api_client.service.status_instance(
instance=instance,
service=service,
).result()
assert response['chronos']['desired_state'] == desired_state, response
@then('instance GET should return error code "{error_code}" for "{job_id}"')
def service_instance_status_error(context, error_code, job_id):
(service, instance, _, __) = decompose_job_id(job_id)
response = None
try:
response = context.paasta_api_client.service.status_instance(
instance=instance,
service=service,
).result()
except bexception.HTTPError as exc:
assert exc.status_code == int(error_code)
assert not response
@then('resources GET should show "{resource}" has {used} used')
def resources_resource_used(context, resource, used):
used = float(used)
response = context.paasta_api_client.resources.resources().result()
assert response[0][resource]['used'] == used, response
@then('resources GET with groupings "{groupings}" and filters "{filters}" should return {num:d} groups')
def resources_groupings_filters(context, groupings, filters, num):
groupings = groupings.split(",")
if len(filters) > 0:
filters = filters.split("|")
response = context.paasta_api_client.resources.resources(groupings=groupings, filter=filters).result()
assert len(response) == num, response
@then('resources GET with groupings "{groupings}" should return {num:d} groups')
def resources_groupings(context, groupings, num):
return resources_groupings_filters(context, groupings, [], num)
@then('marathon_dashboard GET should return "{service}.{instance}" in cluster "{cluster}" with shard {shard:d}')
def marathon_dashboard(context, service, instance, cluster, shard):
response = context.paasta_api_client.marathon_dashboard.marathon_dashboard().result()
dashboard = response[cluster]
shard_url = context.system_paasta_config.get_dashboard_links()[cluster]['Marathon RO'][shard]
for marathon_dashboard_item in dashboard:
if marathon_dashboard_item['service'] == service and marathon_dashboard_item['instance'] == instance:
assert marathon_dashboard_item['shard_url'] == shard_url
|
"""
BSD 3-Clause License
Copyright (c) 2018, Maël Kimmerlin, Aalto University, Finland
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from aiohttp import web
import json
import logging
import uuid
import functools
import traceback
from common import amqp_client
import data_container
import api_interface
async def process(req, callback, url_args = [], required_args = [],
opt_args = []):
req_body = {}
try:
arguments = {"data_store": req.app["data"], "amqp":req.app["amqp"]}
# Get the arguments from the body
if req.method == "POST":
if req.content_type == "application/x-www-form-urlencoded":
req_body = dict(await req.post())
elif req.content_type == "application/json":
req_body = await req.json()
# If no body but required arguments, return 400
if not req_body and required_args:
            raise web.HTTPBadRequest(content_type="text/plain",
text= " ".join(["A JSON body is expected with the required",
"parameters : \n{}\nand the".format(required_args),
"optional parameters :\n{}\n".format(opt_args)
])
)
# Required arguments. If missing, return 400
for k in required_args:
if k not in req_body:
                raise web.HTTPBadRequest(content_type="text/plain",
text=" ".join(["{} is a required parameter".format(k),
"\nA JSON body is expected with the required parameters",
": \n{}\n and the optional".format(required_args),
"parameters :\n{}\n".format(opt_args)
])
)
arguments[k] = req_body[k]
        # Optional arguments: only forwarded when present in the request body
for k in opt_args:
if k not in req_body:
continue
arguments[k] = req_body[k]
# URL arguments, the arguments are defined in the route. if the
# argument is not present, this means that there is an argument in
# url_args that is not in the route, otherwise the routing wouldn't
# take us here and would probably return a 404. So internal error.
for k in url_args:
try:
arguments[k] = req.match_info.get(k)
            except Exception:
                raise web.HTTPInternalServerError(text="Unknown error")
        # Callback: responses are raised so that success and error paths share the logging below
        resp = await callback(**arguments)
        if isinstance(resp, web.Response):
            raise resp
except Exception as e:
if isinstance(e, web.Response):
del arguments["amqp"]
del arguments["data_store"]
logging.debug(" ".join(["REST API : {}".format(req.method),
"endpoint {} called with parameters".format(req.path),
"{} and returned {}".format( arguments, e.status)
]))
return e
else:
logging.error(traceback.format_exc())
del arguments["amqp"]
del arguments["data_store"]
logging.error(" ".join(["REST API : {}".format(req.method),
"endpoint {} called with parameters".format(req.path),
"{} and didn't complete properly".format( arguments)
]))
return web.HTTPInternalServerError(text="Unknown error")
def set_rest_routes(router):
for route_dict in api_interface.api_mappings:
if route_dict["method"] == "GET":
router_func = router.add_get
elif route_dict["method"] == "POST":
router_func = router.add_post
elif route_dict["method"] == "PUT":
router_func = router.add_put
elif route_dict["method"] == "DELETE":
router_func = router.add_delete
else:
continue
router_func(route_dict["endpoint"], functools.partial(process,
callback = route_dict["callback"],
url_args = route_dict["url_args"],
required_args = route_dict["required_args"],
opt_args = route_dict["opt_args"]
)
)
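# For illustration only (hypothetical example, not part of api_interface): given how process()
# and set_rest_routes() consume it, an entry of api_interface.api_mappings is expected to look
# roughly like the following, with the callback returning an aiohttp response object that
# process() then raises and returns.
#
# async def get_node(data_store, amqp, node_id):
#     return web.HTTPOk(content_type="application/json",
#                       text=json.dumps({"id": node_id}))
#
# api_mappings = [{
#     "method": "GET",
#     "endpoint": "/nodes/{node_id}",
#     "callback": get_node,
#     "url_args": ["node_id"],
#     "required_args": [],
#     "opt_args": [],
# }]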
async def build_server(loop, address, port, data_store, amqp_client):
app = web.Application(loop=loop)
app['data'] = data_store
app['amqp'] = amqp_client
set_rest_routes(app.router)
return await loop.create_server(app.make_handler(), address, port)
|