# coding: utf-8
#
# Copyright 2010-2014 Ning, Inc.
# Copyright 2014-2020 Groupon, Inc
# Copyright 2020-2021 Equinix, Inc
# Copyright 2014-2021 The Billing Project, LLC
#
# The Billing Project, LLC licenses this file to you under the Apache License, version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Kill Bill
Kill Bill is an open-source billing and payments platform # noqa: E501
OpenAPI spec version: 0.22.22-SNAPSHOT
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TenantKeyValue(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'key': 'Str',
'values': 'List[Str]'
}
attribute_map = {
'key': 'key',
'values': 'values'
}
def __init__(self, key=None, values=None): # noqa: E501
"""TenantKeyValue - a model defined in Swagger""" # noqa: E501
self._key = None
self._values = None
self.discriminator = None
if key is not None:
self.key = key
if values is not None:
self.values = values
@property
def key(self):
"""Gets the key of this TenantKeyValue. # noqa: E501
:return: The key of this TenantKeyValue. # noqa: E501
:rtype: Str
"""
return self._key
@key.setter
def key(self, key):
"""Sets the key of this TenantKeyValue.
:param key: The key of this TenantKeyValue. # noqa: E501
:type: Str
"""
self._key = key
@property
def values(self):
"""Gets the values of this TenantKeyValue. # noqa: E501
:return: The values of this TenantKeyValue. # noqa: E501
:rtype: List[Str]
"""
return self._values
@values.setter
def values(self, values):
"""Sets the values of this TenantKeyValue.
:param values: The values of this TenantKeyValue. # noqa: E501
:type: List[Str]
"""
self._values = values
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TenantKeyValue):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
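# Editor's usage sketch (not part of the generated client); the key and
# values below are hypothetical examples of a per-tenant configuration entry.
if __name__ == '__main__':
    tkv = TenantKeyValue(key='PLUGIN_CONFIG_demo', values=['a', 'b'])
    assert tkv.to_dict() == {'key': 'PLUGIN_CONFIG_demo', 'values': ['a', 'b']}
    print(tkv)  # pretty-printed by __repr__ via to_str()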
| killbill/killbill-client-python | killbill/models/tenant_key_value.py | Python | apache-2.0 | 4,248 |
import random
from settings import *
from mongoengine import *
from faker import Faker
from models import UserModel, StoryModel
connect('timehighway', host=MONGO_HOST, port=MONGO_PORT,
username=MONGO_USERNAME, password=MONGO_PASSWORD)
fake = Faker()
USER_NUMBERS = 100
STORY_NUMBERS = 1000
STORIES = [
StoryModel(
name="story_num%d" % i,
desc=fake.text(),
).save() for i in range(STORY_NUMBERS)
]
def fake_user():
for i in range(USER_NUMBERS):
u = UserModel(
firstname=fake.first_name(),
lastname=fake.last_name(),
email=fake.email(),
password="ABCD"*4,
)
u.stories = [random.choice(STORIES) for i in range(random.randint(1, 4))]
u.save()
if __name__ == '__main__':
u = UserModel(
firstname='vahid',
lastname='kharazi',
email='[email protected]',
)
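# NOTE (editor): pwd_context is not defined or imported by name in this
# module; it is assumed to arrive via the star import from settings above.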
pass_hash = pwd_context.encrypt('123qwe')
u.password = pass_hash
u.stories = [random.choice(STORIES) for i in range(random.randint(1, 4))]
u.save()
fake_user()
| meahmadi/time-highway | time-highway/api/test_data.py | Python | gpl-2.0 | 963 |
from __future__ import division
from collections import OrderedDict
import numpy as np
from mbuild import Box
from mbuild.utils.conversion import RB_to_OPLS
from mbuild.utils.sorting import natural_sort
__all__ = ['write_lammpsdata']
def write_lammpsdata(structure, filename):
"""Output a LAMMPS data file.
Outputs a LAMMPS data file in the 'full' atom style format. Assumes use
of 'real' units. See http://lammps.sandia.gov/doc/atom_style.html for
more information on atom styles.
Parameters
----------
structure : parmed.Structure
ParmEd structure object
filename : str
Path of the output file
Notes
-----
See http://lammps.sandia.gov/doc/2001/data_format.html for a full description
of the LAMMPS data format. Currently the following sections are supported (in
addition to the header): *Masses*, *Nonbond Coeffs*, *Bond Coeffs*, *Angle
Coeffs*, *Dihedral Coeffs*, *Atoms*, *Bonds*, *Angles*, *Dihedrals*
"""
xyz = np.array([[atom.xx,atom.xy,atom.xz] for atom in structure.atoms])
forcefield = True
if structure[0].type == '':
forcefield = False
box = Box(lengths=np.array([structure.box[0], structure.box[1], structure.box[2]]))
if forcefield:
types = [atom.type for atom in structure.atoms]
else:
types = [atom.name for atom in structure.atoms]
unique_types = list(set(types))
unique_types.sort(key=natural_sort)
charges = [atom.charge for atom in structure.atoms]
bonds = [[bond.atom1.idx+1, bond.atom2.idx+1] for bond in structure.bonds]
angles = [[angle.atom1.idx+1,
angle.atom2.idx+1,
angle.atom3.idx+1] for angle in structure.angles]
dihedrals = [[dihedral.atom1.idx+1,
dihedral.atom2.idx+1,
dihedral.atom3.idx+1,
dihedral.atom4.idx+1] for dihedral in structure.rb_torsions]
if bonds:
if len(structure.bond_types) == 0:
bond_types = np.ones(len(bonds),dtype=int)
else:
unique_bond_types = dict(enumerate(set([(round(bond.type.k,3),
round(bond.type.req,3)) for bond in structure.bonds])))
unique_bond_types = OrderedDict([(y,x+1) for x,y in unique_bond_types.items()])
bond_types = [unique_bond_types[(round(bond.type.k,3),
round(bond.type.req,3))] for bond in structure.bonds]
if angles:
unique_angle_types = dict(enumerate(set([(round(angle.type.k,3),
round(angle.type.theteq,3)) for angle in structure.angles])))
unique_angle_types = OrderedDict([(y,x+1) for x,y in unique_angle_types.items()])
angle_types = [unique_angle_types[(round(angle.type.k,3),
round(angle.type.theteq,3))] for angle in structure.angles]
if dihedrals:
unique_dihedral_types = dict(enumerate(set([(round(dihedral.type.c0,3),
round(dihedral.type.c1,3),
round(dihedral.type.c2,3),
round(dihedral.type.c3,3),
round(dihedral.type.c4,3),
round(dihedral.type.c5,3),
round(dihedral.type.scee,1),
round(dihedral.type.scnb,1)) for dihedral in structure.rb_torsions])))
unique_dihedral_types = OrderedDict([(y,x+1) for x,y in unique_dihedral_types.items()])
dihedral_types = [unique_dihedral_types[(round(dihedral.type.c0,3),
round(dihedral.type.c1,3),
round(dihedral.type.c2,3),
round(dihedral.type.c3,3),
round(dihedral.type.c4,3),
round(dihedral.type.c5,3),
round(dihedral.type.scee,1),
round(dihedral.type.scnb,1))] for dihedral in structure.rb_torsions]
with open(filename, 'w') as data:
data.write(filename+' - created by mBuild\n\n')
data.write('{:d} atoms\n'.format(len(structure.atoms)))
data.write('{:d} bonds\n'.format(len(bonds)))
data.write('{:d} angles\n'.format(len(angles)))
data.write('{:d} dihedrals\n\n'.format(len(dihedrals)))
data.write('{:d} atom types\n'.format(len(set(types))))
if bonds:
data.write('{:d} bond types\n'.format(len(set(bond_types))))
if angles:
data.write('{:d} angle types\n'.format(len(set(angle_types))))
if dihedrals:
data.write('{:d} dihedral types\n'.format(len(set(dihedral_types))))
data.write('\n')
# Box data
for i,dim in enumerate(['x','y','z']):
data.write('{0:.6f} {1:.6f} {2}lo {2}hi\n'.format(box.mins[i],box.maxs[i],dim))
# Mass data
masses = [atom.mass for atom in structure.atoms]
mass_dict = dict([(unique_types.index(atom_type)+1,mass) for atom_type,mass in zip(types,masses)])
data.write('\nMasses\n\n')
for atom_type,mass in mass_dict.items():
data.write('{:d}\t{:.6f}\t# {}\n'.format(atom_type,mass,unique_types[atom_type-1]))
if forcefield:
# Pair coefficients
epsilons = [atom.epsilon for atom in structure.atoms]
sigmas = [atom.sigma for atom in structure.atoms]
epsilon_dict = dict([(unique_types.index(atom_type)+1,epsilon) for atom_type,epsilon in zip(types,epsilons)])
sigma_dict = dict([(unique_types.index(atom_type)+1,sigma) for atom_type,sigma in zip(types,sigmas)])
data.write('\nPair Coeffs # lj\n\n')
for idx,epsilon in epsilon_dict.items():
data.write('{}\t{:.5f}\t{:.5f}\n'.format(idx,epsilon,sigma_dict[idx]))
# Bond coefficients
if bonds:
data.write('\nBond Coeffs # harmonic\n\n')
for params,idx in unique_bond_types.items():
data.write('{}\t{}\t{}\n'.format(idx,*params))
# Angle coefficients
if angles:
data.write('\nAngle Coeffs # harmonic\n\n')
for params,idx in unique_angle_types.items():
data.write('{}\t{}\t{:.5f}\n'.format(idx,*params))
# Dihedral coefficients
if dihedrals:
data.write('\nDihedral Coeffs # opls\n\n')
for params,idx in unique_dihedral_types.items():
opls_coeffs = RB_to_OPLS(params[0],
params[1],
params[2],
params[3],
params[4],
params[5])
data.write('{}\t{:.5f}\t{:.5f}\t{:.5f}\t{:.5f}\n'.format(idx,*opls_coeffs))
# Atom data
data.write('\nAtoms\n\n')
for i,coords in enumerate(xyz):
data.write('{:d}\t{:d}\t{:d}\t{:.6f}\t{:.6f}\t{:.6f}\t{:.6f}\n'.format(i+1,0,unique_types.index(types[i])+1,charges[i],*coords))
# Bond data
if bonds:
data.write('\nBonds\n\n')
for i,bond in enumerate(bonds):
data.write('{:d}\t{:d}\t{:d}\t{:d}\n'.format(i+1,bond_types[i],bond[0],bond[1]))
# Angle data
if angles:
data.write('\nAngles\n\n')
for i,angle in enumerate(angles):
data.write('{:d}\t{:d}\t{:d}\t{:d}\t{:d}\n'.format(i+1,angle_types[i],angle[0],angle[1],angle[2]))
# Dihedral data
if dihedrals:
data.write('\nDihedrals\n\n')
for i,dihedral in enumerate(dihedrals):
data.write('{:d}\t{:d}\t{:d}\t{:d}\t{:d}\t{:d}\n'.format(i+1,dihedral_types[i],dihedral[0],dihedral[1],dihedral[2],dihedral[3]))
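# Editor's usage sketch (hypothetical names; the exact mBuild conversion API
# varies by version): `structure` must be a parmed.Structure that carries box
# information, and typed atoms if a force field has been applied.
#
#     structure = compound.to_parmed(box=compound.boundingbox)
#     write_lammpsdata(structure, 'system.lammps')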
| ctk3b/mbuild | mbuild/formats/lammpsdata.py | Python | mit | 8,356 |
import os
import lmdb
import logging
import functools
import contextlib
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.reactor as s_react
import synapse.lib.lmdb as s_lmdb
import synapse.lib.tufo as s_tufo
import synapse.lib.cache as s_cache
import synapse.lib.const as s_const
import synapse.lib.scope as s_scope
import synapse.lib.config as s_config
import synapse.lib.msgpack as s_msgpack
logger = logging.getLogger(__name__)
authver = (0, 0, 1)
class Rules:
'''
Rules provides an abstraction for metadata
based filtration of events and tufos.
Each "rule" is a tuple of:
(allow, perm): (bool, (str, dict))
'''
def __init__(self, rules):
self._r_rules = rules
self._r_match = s_cache.MatchCache()
self._r_rules_by_perm = s_cache.Cache(onmiss=self._onRulesPermMiss)
def _onRulesPermMiss(self, name):
retn = []
for rule in self._r_rules:
if self._r_match.match(name, rule[1][0]):
retn.append(rule)
return retn
def _cmprule(self, rule, perm):
for prop, must in rule[1][1].items():
valu = perm[1].get(prop)
if valu is None:
return False
if not self._r_match.match(valu, must):
return False
return True
def allow(self, perm):
'''
Returns True if the given perm/info is allowed by the rules.
Args:
perm ((str,dict)): The requested permission tuple
Returns:
(bool): True if the rules allow the perm/info
'''
for rule in self._r_rules_by_perm.get(perm[0]):
if self._cmprule(rule, perm):
return rule[0]
return False
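# Editor's sketch (hypothetical rule data): perm names and prop values are
# matched through s_cache.MatchCache, which supports glob-style wildcards.
#
#     rules = Rules([(True, ('node:add', {'form': 'inet:*'}))])
#     rules.allow(('node:add', {'form': 'inet:ipv4'}))  # -> True
#     rules.allow(('node:del', {'form': 'inet:ipv4'}))  # -> False (no matching rule)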
def whoami():
'''
Return the name of the current synapse user for this thread.
Example:
name = s_auth.whoami()
'''
return s_scope.get('syn:user', 'root@localhost')
@contextlib.contextmanager
def runas(user):
'''
Construct and return a with-block object which runs as the given
synapse user name.
Example:
import synapse.lib.auth as s_auth
with s_auth.runas('visi@localhost'):
# calls from here down may use check user/perms
dostuff()
'''
with s_scope.enter({'syn:user': user}):
yield
def reqAdmin(f, attr='auth'):
'''
A Decorator to wrap a function to require it to be executed in a admin user context.
Args:
f: Function being wrapped.
attr (str): Name of Auth local.
Notes:
This decorator should only be placed on methods on a class since it relies
on having access to a local instance of a Auth object.
Returns:
Function results.
Raises:
s_exc.ReqConfOpt: If the auth local is not found on the object.
s_exc.NoSuchUser: If the Auth local does not have a instance of the current user.
s_exc.AuthDeny: If the user in scope is not a admin user.
'''
@functools.wraps(f)
def _f(*args, **kwargs):
auth = getattr(args[0], attr, None) # type: s_auth.Auth
if not auth:
raise s_exc.ReqConfOpt(mesg='requires attr on local object',
attr=attr)
uname = whoami()
uobj = auth.reqUser(uname)
if not uobj.admin:
raise s_exc.AuthDeny(mesg='Operation requires admin',
name=f.__qualname__, user=uname)
logger.info('Executing [%s][%s][%s] as [%s]',
f.__qualname__, args, kwargs, uname)
return f(*args, **kwargs)
return _f
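# Editor's sketch (hypothetical class): the decorated method must live on an
# object exposing an Auth instance under the `attr` name ('auth' by default),
# and the user in scope must exist in that Auth and have admin set.
#
#     class CellApi:
#         def __init__(self, auth):
#             self.auth = auth
#         @reqAdmin
#         def addCoreUser(self, name):
#             return self.auth.addUser(name)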
class AuthMixin:
def __init__(self, auth):
'''
A mixin that can be used to provide a helper for remote access to an
Auth object. The API endpoint ``authReact()`` can be used to manipulate
the Auth object by only allowing admin users to perform actions.
Args:
auth (Auth): An auth instance. This is set to ``self.auth``.
'''
self.auth = auth
self._mxrtor = s_react.Reactor()
self._mxrtor.act('auth:get:users', self.__authGetUsers)
self._mxrtor.act('auth:get:roles', self.__authGetRoles)
# User actions
self._mxrtor.act('auth:add:user', self.__authAddUser)
self._mxrtor.act('auth:del:user', self.__authDelUser)
self._mxrtor.act('auth:req:user', self.__authReqUser)
self._mxrtor.act('auth:add:urole', self.__authAddUserRole)
self._mxrtor.act('auth:add:urule', self.__authAddUserRule)
self._mxrtor.act('auth:del:urole', self.__authDelUserRole)
self._mxrtor.act('auth:del:urule', self.__authDelUserRule)
# User admin actions
self._mxrtor.act('auth:add:admin', self.__authAddAdmin)
self._mxrtor.act('auth:del:admin', self.__authDelAdmin)
# Role actions
self._mxrtor.act('auth:req:role', self.__authReqRole)
self._mxrtor.act('auth:add:role', self.__authAddRole)
self._mxrtor.act('auth:del:role', self.__authDelRole)
self._mxrtor.act('auth:add:rrule', self.__authAddRoleRule)
self._mxrtor.act('auth:del:rrule', self.__authDelRoleRule)
def authReact(self, mesg):
'''
General interface for interfacing with Auth via messages.
Args:
mesg ((str, dict)): A message we react to.
Returns:
(bool, ((str, dict))): isok, retn tuple.
'''
try:
isok, retn = self._mxrtor.react(mesg)
except Exception as e:
logger.exception('Failed to process mesg [%s]', mesg)
retn = s_common.getexcfo(e)
isok = False
finally:
return isok, retn
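# Editor's sketch (hypothetical `mixin` instance): callers drive the mixin
# with (name, info) message tuples; all of the handlers above are admin-gated.
#
#     with runas('root@localhost'):
#         isok, retn = mixin.authReact(('auth:add:user', {'user': 'visi@localhost'}))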
@reqAdmin
def __authGetUsers(self, mesg):
mname, mdict = mesg
users = self.auth.getUsers()
ret = (mname, {'users': users})
return True, ret
@reqAdmin
def __authGetRoles(self, mesg):
mname, mdict = mesg
roles = self.auth.getRoles()
ret = (mname, {'roles': roles})
return True, ret
@reqAdmin
def __authReqUser(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
uobj = self.auth.reqUser(name)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authReqRole(self, mesg):
mname, mdict = mesg
name = mdict.get('role')
robj = self.auth.reqRole(name)
ret = s_tufo.tufo(mname, role=(name, robj._getAuthData()))
return True, ret
@reqAdmin
def __authAddUser(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
uobj = self.auth.addUser(name)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authDelUser(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
_ret = self.auth.delUser(name)
ret = s_tufo.tufo(mname, user=name, deleted=_ret)
return True, ret
@reqAdmin
def __authDelRole(self, mesg):
mname, mdict = mesg
name = mdict.get('role')
_ret = self.auth.delRole(name)
ret = s_tufo.tufo(mname, role=name, deleted=_ret)
return True, ret
@reqAdmin
def __authAddRole(self, mesg):
mname, mdict = mesg
name = mdict.get('role')
robj = self.auth.addRole(name)
ret = s_tufo.tufo(mname, role=(name, robj._getAuthData()))
return True, ret
@reqAdmin
def __authAddUserRule(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
rule = mdict.get('rule')
uobj = self.auth.reqUser(name)
uobj.addRule(rule)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authDelUserRule(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
rule = mdict.get('rule')
uobj = self.auth.reqUser(name)
uobj.delRule(rule)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authAddRoleRule(self, mesg):
mname, mdict = mesg
name = mdict.get('role')
rule = mdict.get('rule')
robj = self.auth.reqRole(name)
robj.addRule(rule)
ret = s_tufo.tufo(mname, role=(name, robj._getAuthData()))
return True, ret
@reqAdmin
def __authDelRoleRule(self, mesg):
mname, mdict = mesg
name = mdict.get('role')
rule = mdict.get('rule')
robj = self.auth.reqRole(name)
robj.delRule(rule)
ret = s_tufo.tufo(mname, role=(name, robj._getAuthData()))
return True, ret
@reqAdmin
def __authAddAdmin(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
uobj = self.auth.reqUser(name)
uobj.setAdmin(True)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authDelAdmin(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
uobj = self.auth.reqUser(name)
uobj.setAdmin(False)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authAddUserRole(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
role = mdict.get('role')
uobj = self.auth.reqUser(name)
robj = self.auth.reqRole(role)
uobj.addRole(role)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
@reqAdmin
def __authDelUserRole(self, mesg):
mname, mdict = mesg
name = mdict.get('user')
role = mdict.get('role')
uobj = self.auth.reqUser(name)
robj = self.auth.reqRole(role)
uobj.delRole(role)
ret = s_tufo.tufo(mname, user=(name, uobj._getAuthData()))
return True, ret
class Auth(s_config.Config):
'''
An authorization object which can help enforce cortex rules.
Args:
dirn (str): Dictionary backing the Auth data.
conf (dict): Optional configuration data.
'''
def __init__(self, dirn, conf=None):
s_config.Config.__init__(self, opts=conf)
path = os.path.join(dirn, 'auth.lmdb')
mapsize = self.getConfOpt('lmdb:mapsize')
self.lenv = lmdb.open(path, max_dbs=128)
self.lenv.set_mapsize(mapsize)
self.onfini(self.lenv.close)
self._db_users = self.lenv.open_db(b'users')
self._db_roles = self.lenv.open_db(b'roles')
# these may be used by Auth() callers
self.users = {}
self.roles = {}
with self.lenv.begin() as xact:
for name, info in self._iterAuthDefs(xact, self._db_roles):
self.roles[name] = Role(self, name, info)
for name, info in self._iterAuthDefs(xact, self._db_users):
self.users[name] = User(self, name, info)
def initConfDefs(self):
self.addConfDefs((
('lmdb:mapsize', {'type': 'int', 'defval': s_lmdb.DEFAULT_SMALL_MAP_SIZE,
'doc': 'Memory map size for the auth LMDB.'}),
))
def addUser(self, name):
'''
Add a new user to the auth system.
Args:
name (str): The user name.
Returns:
User: The newly created user.
Raises:
s_exc.DupUserName: If the user already exists.
'''
with self.lenv.begin(write=True) as xact:
if self.users.get(name) is not None:
raise s_exc.DupUserName(name=name)
user = User(self, name)
self.users[name] = user
uenc = name.encode('utf8')
data = user._getAuthData()
data['vers'] = authver
byts = s_msgpack.en(data)
xact.put(uenc, byts, db=self._db_users)
return user
def delUser(self, name):
'''
Delete a user from the auth system.
Args:
name (str): The user name to delete.
Returns:
True: True if the operation succeeded.
Raises:
s_exc.NoSuchUser: If the user did not exist.
'''
with self.lenv.begin(write=True) as xact:
user = self.users.pop(name, None)
if user is None:
raise s_exc.NoSuchUser(user=name)
uenc = name.encode('utf8')
xact.delete(uenc, db=self._db_users)
return True
def addRole(self, name):
'''
Add a new role to the auth system.
Args:
name (str): The role name.
Returns:
Role: The newly created role.
Raises:
s_exc.DupRoleName: If the role already exists.
'''
with self.lenv.begin(write=True) as xact:
if self.roles.get(name) is not None:
raise s_exc.DupRoleName(name=name)
# Role() does rdef validation
role = Role(self, name)
self.roles[name] = role
renc = name.encode('utf8')
data = role._getAuthData()
data['vers'] = authver
byts = s_msgpack.en(data)
xact.put(renc, byts, db=self._db_roles)
return role
def delRole(self, name):
'''
Delete a role from the auth system.
Args:
name (str): The user name to delete.
Returns:
True: True if the operation succeeded.
Raises:
s_exc.NoSuchRole: If the role does not exist.
'''
with self.lenv.begin(write=True) as xact:
role = self.roles.pop(name, None)
if role is None:
raise s_exc.NoSuchRole(name=name)
nenc = name.encode('utf8')
xact.delete(nenc, db=self._db_roles)
for user in self.users.values():
role = user.roles.pop(name, None)
if role is not None:
nenc = user.name.encode('utf8')
data = user._getAuthData()
data['vers'] = authver
byts = s_msgpack.en(data)
xact.put(nenc, byts, db=self._db_users)
return True
def _iterAuthDefs(self, xact, db):
with xact.cursor(db=db) as curs:
for nenc, byts in curs.iternext():
name = nenc.decode('utf8')
info = s_msgpack.un(byts)
yield name, info
def _saveAuthData(self, name, info, db):
info['vers'] = authver
with self.lenv.begin(write=True) as xact:
lkey = name.encode('utf8')
lval = s_msgpack.en(info)
xact.put(lkey, lval, db=db)
def _saveUserInfo(self, user, info):
self._saveAuthData(user, info, self._db_users)
def _saveRoleInfo(self, role, info):
self._saveAuthData(role, info, self._db_roles)
def getUsers(self):
'''
Get a list of user names.
Returns:
list: List of user names.
'''
return list(self.users.keys())
def getRoles(self):
'''
Get a list of roles.
Returns:
list: List of role names.
'''
return list(self.roles.keys())
def reqUser(self, user):
'''
Get a user object.
Args:
user (str): Username to request.
Returns:
User: User object.
Raises:
s_exc.NoSuchUser: If the user does not exist.
'''
user = self.users.get(user)
if not user:
raise s_exc.NoSuchUser(user=user)
return user
def reqRole(self, role):
'''
Get a role object.
Args:
role (str): Name of the role object to get.
Returns:
Role: Role object.
Raises:
s_exc.NoSuchRole: If the role does not exist.
'''
role = self.roles.get(role)
if not role:
raise s_exc.NoSuchRole(role=role)
return role
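# Editor's sketch (hypothetical directory and names): typical Auth lifecycle.
#
#     auth = Auth('/some/existing/dir')
#     user = auth.addUser('visi@localhost')
#     user.addRule(('node:add', {'form': '*'}))
#     user.allowed(('node:add', {'form': 'inet:ipv4'}))  # -> True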
class TagTree:
'''
A tag-oriented hierarchical permissions tree.
'''
def __init__(self):
self.root = (False, {})
self.cache = {}
def clear(self):
'''
Clear the tag tree and cache.
'''
self.cache.clear()
self.root = (False, {})
def add(self, tag):
'''
Add a tag to the tree.
Args:
tag (str): The tag (with no #)
'''
if len(tag) > 1 and '*' in tag:
raise s_exc.BadRuleValu(key='tag', valu=tag,
mesg='Tags >1 character cannot contain "*".')
node = self.root
for name in tag.split('.'):
step = node[1].get(name)
if step is None:
step = node[1][name] = [False, {}]
node = step
node[0] = True
self.cache.clear()
def get(self, tag):
'''
Get a tag status from the tree.
Args:
tag (str): Tag to get from the tree.
Notes:
If ``*`` has been added to the TagTree, this will
always return True.
Returns:
bool: True if the tag is in the tree, False otherwise.
'''
retn = self.cache.get(tag)
if retn is not None:
return retn
node = self.root
# Fast path for '*' perms
if '*' in node[1]:
self.cache[tag] = True
return True
for name in tag.split('.'):
step = node[1].get(name)
if step is None:
self.cache[tag] = False
return False
if step[0]:
self.cache[tag] = True
return True
node = step
return False
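# Editor's sketch: a grant on a parent tag covers every tag beneath it.
#
#     tree = TagTree()
#     tree.add('foo.bar')
#     tree.get('foo.bar')      # -> True
#     tree.get('foo.bar.baz')  # -> True (covered by the foo.bar grant)
#     tree.get('foo')          # -> False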
class AuthBase:
'''
Base class for implementing Auth rule checking.
('node:add', {'form': <form>}),
('node:del', {'form': <form>}),
('node:prop:set', {'form': <form>, 'prop': <prop>})
('node:tag:add', {'tag': <tag>}),
('node:tag:del', {'tag': <tag>}),
# * may be used to mean "any" for form/prop values.
'''
def __init__(self, auth, name, info=None):
if info is None:
info = {}
self.auth = auth
self.info = info
# it is ok for callers to access these...
self.name = name
self.admin = info.get('admin', False)
self.rules = list(info.get('rules', ()))
self._add_funcs = {
'node:add': self._addNodeAdd,
'node:del': self._addNodeDel,
'node:prop:set': self._addNodeSet,
'node:tag:add': self._addTagAdd,
'node:tag:del': self._addTagDel,
}
self._may_funcs = {
'node:add': self._mayNodeAdd,
'node:del': self._mayNodeDel,
'node:prop:set': self._mayNodeSet,
'node:tag:add': self._mayTagAdd,
'node:tag:del': self._mayTagDel,
}
# tags are a tree. so are the perms.
self._tag_add = TagTree()
self._tag_del = TagTree()
self._node_add = {} # <form>: True
self._node_del = {} # <form>: True
self._node_set = {} # (<form>,<prop>): True
self._initAuthData()
def setAdmin(self, admin):
'''
Set the admin value to True/False.
Args:
admin (bool): Value to set the admin value too.
Returns:
bool: The current AuthBase admin value.
'''
admin = bool(admin)
if admin == self.admin:
return admin
self.admin = admin
self._syncAuthData()
return admin
def addRule(self, rule):
'''
Add an allow rule.
Args:
rule ((str,dict)): A rule tufo to add.
Returns:
bool: True if the rule was added. False otherwise.
'''
ret = self._addRuleTufo(rule)
if not ret:
return ret
self.rules.append(rule)
self._syncAuthData()
return ret
def delRule(self, rule):
'''
Remove an allow rule.
Args:
rule ((str,dict)): A rule tufo to remove.
Returns:
True:
Raises:
s_exc.NoSuchRule: If the rule did not exist.
'''
try:
self.rules.remove(rule)
except ValueError:
raise s_exc.NoSuchRule(rule=rule, name=self.name,
mesg='Rule does not exist')
self._syncAuthData()
self._initAuthData()
return True
def _getAuthData(self):
return {
'admin': self.admin,
'rules': self.rules,
}
def allowed(self, perm, elev=True):
'''
Check if the user/role is allowed the given permission.
Args:
perm ((str,dict)): A permission tuple.
elev (bool): If true, allow admin status.
Returns:
bool: True if the permission is allowed. False otherwise.
'''
if self.admin and elev:
return True
func = self._may_funcs.get(perm[0])
if func is None:
logger.warning('unknown perm: %r' % (perm,))
return False
try:
return func(perm)
except Exception as e:
logger.warning('AuthBase "may" func error: %r' % (perm,))
return False
def _syncAuthData(self):
self.info = self._getAuthData()
self._saveAuthData()
def _saveAuthData(self): # pragma: no cover
raise s_exc.NoSuchImpl(name='_saveAuthData',
mesg='_saveAuthData not implemented by AuthBase')
def _initAuthData(self):
self._node_add.clear()
self._node_del.clear()
self._node_set.clear()
self._tag_add.clear()
self._tag_del.clear()
[self._addRuleTufo(rule) for rule in self.rules]
def _addRuleTufo(self, rule):
func = self._add_funcs.get(rule[0])
if func is None:
logger.warning('no such rule func: %r' % (rule,))
return False
try:
func(rule)
except Exception as e:
logger.exception('rule function error: %r' % (rule,))
return False
return True
#####################################################
def _addNodeAdd(self, rule):
form = rule[1].get('form')
if not form:
raise s_exc.BadRuleValu(key='form', valu=form,
mesg='node:add requires "form"')
self._node_add[form] = True
def _addNodeDel(self, rule):
form = rule[1].get('form')
if not form:
raise s_exc.BadRuleValu(key='form', valu=form,
mesg='node:del requires "form"')
self._node_del[form] = True
def _addNodeSet(self, rule):
form = rule[1].get('form')
prop = rule[1].get('prop')
if not form:
raise s_exc.BadRuleValu(key='form', valu=form,
mesg='node:prop:set requires "form"')
if not prop:
raise s_exc.BadRuleValu(key='valu', valu=prop,
mesg='node:prop:set requires "prop"')
self._node_set[(form, prop)] = True
def _addTagAdd(self, rule):
tag = rule[1].get('tag')
if not tag:
raise s_exc.BadRuleValu(key='tag', valu=tag,
mesg='node:tag:add requires "tag"')
self._tag_add.add(tag)
def _addTagDel(self, rule):
tag = rule[1].get('tag')
if not tag:
raise s_exc.BadRuleValu(key='tag', valu=tag,
mesg='node:tag:del requires "tag"')
self._tag_del.add(tag)
#####################################################
def _mayNodeAdd(self, perm):
form = perm[1].get('form')
if self._node_add.get(form):
return True
if self._node_add.get('*'):
return True
return False
def _mayNodeDel(self, perm):
form = perm[1].get('form')
if self._node_del.get(form):
return True
if self._node_del.get('*'):
return True
return False
def _mayNodeSet(self, perm):
form = perm[1].get('form')
prop = perm[1].get('prop')
if self._node_set.get((form, prop)):
return True
if self._node_set.get((form, '*')):
return True
if self._node_set.get(('*', '*')):
return True
return False
def _mayTagAdd(self, perm):
tag = perm[1].get('tag')
return self._tag_add.get(tag)
def _mayTagDel(self, perm):
tag = perm[1].get('tag')
return self._tag_del.get(tag)
#####################################################
class Role(AuthBase):
def _saveAuthData(self):
info = {
'admin': self.admin,
'rules': self.rules
}
self.auth._saveRoleInfo(self.name, info)
class User(AuthBase):
def _getAuthData(self):
info = AuthBase._getAuthData(self)
info['roles'] = list(self.roles.keys())
return info
def _saveAuthData(self):
info = self._getAuthData()
self.auth._saveUserInfo(self.name, info)
def _initAuthData(self):
AuthBase._initAuthData(self)
self.roles = {}
for name in self.info.get('roles', ()):
role = self.auth.roles.get(name)
if role is None: # pragma: no cover
logger.warning('user has non-existent role: %r' % (name,))
continue
self.roles[name] = role
def allowed(self, perm, elev=True):
'''
Check if a user is allowed the given permission.
Args:
perm ((str,dict)): A permission tuple.
elev (bool): If true, allow admin status.
Returns:
bool: True if the permission is allowed. False otherwise.
'''
if AuthBase.allowed(self, perm, elev=elev):
return True
for name, role in self.roles.items():
if role.allowed(perm, elev=elev):
return True
return False
def addRole(self, name):
'''
Grant a role to a user.
Args:
name (str): The name of the role to grant.
Returns:
True:
Raises:
s_exc.NoSuchRole: If the role does not exist.
'''
role = self.auth.roles.get(name)
if role is None:
raise s_exc.NoSuchRole(name=name)
self.roles[name] = role
self._saveAuthData()
return True
def delRole(self, name):
'''
Revoke a role from a user.
Args:
name (str): The name of the role to revoke.
Returns:
bool: True if the role was removed; False if the role was not on the user.
'''
role = self.roles.pop(name, None)
if role is None:
return False
self._saveAuthData()
return True
| vivisect/synapse | synapse/lib/auth.py | Python | apache-2.0 | 27,384 |
# proxy module
from __future__ import absolute_import
from blockcanvas.function_tools.function_library import *
| enthought/etsproxy | enthought/block_canvas/function_tools/function_library.py | Python | bsd-3-clause | 112 |
from tkinter import *
from math import *
import constantes
menuBas = False
menuHaut = False
selectPrecedent = 0
select = 0
def gestionMenu(fen, affichage, boutons):
global select, menuBas, menuHaut, selectPrecedent
# Changes in the menu
if menuBas:
if select <= 2:
select += 1
elif menuHaut:
if select > 0:
select -= 1
menuHaut = False
menuBas = False
# check for changes
if select != selectPrecedent:
selectPrecedent = select
actuali(fen, affichage, boutons)
# check whether a choice has been made
if constantes.valider:
constantes.valider = False
if select == 0:
return constantes.CHOIX_PLAY
elif select == 1:
return constantes.CHOIX_HARDMODE
elif select == 2:
return constantes.CHOIX_OPTIONS
elif select == 3:
return constantes.CHOIX_QUITTER
constantes.valider = False
return constantes.CHOIX_NONE
def actuali(fen, affichage, boutons):
import ressources
for i in range(4):
if select == i:
affichage.itemconfig(boutons[i], image=ressources.boutonA[i])
else:
affichage.itemconfig(boutons[i], image=ressources.boutonD[i])
| TheOneDarkling/PAC-MAN-python-project | menu.py | Python | apache-2.0 | 1,134 |
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# This file is part of Guadalinex
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__author__ = "Antonio Hernández <[email protected]>"
__copyright__ = "Copyright (C) 2011, Junta de Andalucía <[email protected]>"
__license__ = "GPL-2"
import LinkToChefHostnamePage
import LinkToChefResultsPage
import firstboot.pages.linkToChef
from firstboot_lib import PageWindow
from firstboot import serverconf
import firstboot.validation as validation
import gettext
from gettext import gettext as _
gettext.textdomain('firstboot')
__REQUIRED__ = False
__DEFAULT_ROLE__ = 'default_group'
def get_page(main_window):
page = LinkToChefConfEditorPage(main_window)
return page
class LinkToChefConfEditorPage(PageWindow.PageWindow):
__gtype_name__ = "LinkToChefConfEditorPage"
def finish_initializing(self):
self.update_server_conf = False
self.chef_is_configured = False
self.unlink_from_chef = False
def load_page(self, params=None):
if 'server_conf' in params:
self.server_conf = params['server_conf']
if not self.server_conf is None:
self.ui.lblVersionValue.set_label(self.server_conf.get_version())
self.ui.lblOrganizationValue.set_label(self.server_conf.get_organization())
self.ui.lblNotesValue.set_label(self.server_conf.get_notes())
self.ui.txtUrlChef.set_text(self.server_conf.get_chef_conf().get_url())
self.ui.txtUrlChefCert.set_text(self.server_conf.get_chef_conf().get_pem_url())
self.ui.txtHostname.set_text(self.server_conf.get_chef_conf().get_hostname())
self.ui.txtDefaultRole.set_text(self.server_conf.get_chef_conf().get_default_role())
if self.server_conf is None:
self.server_conf = serverconf.ServerConf()
if len(self.ui.txtDefaultRole.get_text()) == 0:
self.ui.txtDefaultRole.set_text(__DEFAULT_ROLE__)
self.update_server_conf = True
self.chef_is_configured = params['chef_is_configured']
self.unlink_from_chef = params['unlink_from_chef']
# if self.chef_is_configured and self.unlink_from_chef:
# self.ui.chkChef.get_child().set_markup(self._bold(_('This \
#workstation is going to be unlinked from the Chef server.')))
def _bold(self, str):
return '<b>%s</b>' % str
def translate(self):
desc = _('These parameters are required in order to join a Chef server:')
self.ui.lblDescription.set_text(desc)
self.ui.lblUrlChefDesc.set_label(_('"Chef URL": an existing URL on your server where Chef is installed.'))
self.ui.lblUrlChefCertDesc.set_label(_('"Chef Certificate": Validation certificate URL\
in order to autoregister this workstation in the Chef server.'))
self.ui.lblHostnameDesc.set_label(_('"Node Name": must be a unique name.'))
self.ui.lblDefaultRoleDesc.set_label(_('"Default Group": a global group for all the workstations in your organization.\nIf you are not an advanced Chef administrator, do not change this.'))
self.ui.lblVersion.set_label(_('Version'))
self.ui.lblOrganization.set_label(_('Organization'))
self.ui.lblNotes.set_label(_('Comments'))
self.ui.lblUrlChef.set_label('Chef URL')
self.ui.lblUrlChefCert.set_label(_('Certificate URL'))
self.ui.lblHostname.set_label(_('Node Name'))
self.ui.lblDefaultRole.set_label(_('Default Group'))
def previous_page(self, load_page_callback):
load_page_callback(firstboot.pages.linkToChef)
def next_page(self, load_page_callback):
if not self.unlink_from_chef:
result, messages = self.validate_conf()
if result == True:
result, messages = serverconf.setup_server(
server_conf=self.server_conf,
link_ldap=False,
unlink_ldap=False,
link_chef=not self.unlink_from_chef,
unlink_chef=self.unlink_from_chef
)
load_page_callback(LinkToChefResultsPage, {
'server_conf': self.server_conf,
'result': result,
'messages': messages
})
else:
result, messages = serverconf.setup_server(
server_conf=self.server_conf,
link_chef=not self.unlink_from_chef,
unlink_chef=self.unlink_from_chef
)
load_page_callback(LinkToChefResultsPage, {
'result': result,
'server_conf': self.server_conf,
'messages': messages
})
def on_serverConf_changed(self, entry):
if not self.update_server_conf:
return
self.server_conf.get_chef_conf().set_url(self.ui.txtUrlChef.get_text())
self.server_conf.get_chef_conf().set_pem_url(self.ui.txtUrlChefCert.get_text())
self.server_conf.get_chef_conf().set_default_role(self.ui.txtDefaultRole.get_text())
self.server_conf.get_chef_conf().set_hostname(self.ui.txtHostname.get_text())
def validate_conf(self):
valid = True
messages = []
if not self.server_conf.get_chef_conf().validate():
valid = False
messages.append({'type': 'error', 'message': _('Chef and Chef Cert URLs must be valid URLs.')})
hostname = self.server_conf.get_chef_conf().get_hostname()
if not validation.is_qname(hostname):
valid = False
messages.append({'type': 'error', 'message': _('Node name is empty or contains invalid characters.')})
try:
used_hostnames = serverconf.get_chef_hostnames(self.server_conf.get_chef_conf())
except Exception as e:
used_hostnames = []
# IMPORTANT: Append the error but don't touch the variable "valid" here,
# just because if we can't get the hostnames here,
# Chef will inform us about that later, while we are registering
# the client.
messages.append({'type': 'error', 'message': str(e)})
if hostname in used_hostnames:
valid = False
messages.append({'type': 'error', 'message': _('Node name already exists in the Chef server. Choose a different one.')})
return valid, messages
| gecos-team/gecos-firstboot | firstboot/pages/linkToChef/LinkToChefConfEditorPage.py | Python | gpl-2.0 | 7,126 |
# $Id: 125_sdp_with_multi_audio_0.py 2081 2008-06-27 21:59:15Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
# Multiple good m=audio lines! The current algorithm in pjsua-lib will
# select the last audio (which should be okay, as we're entitled to
# select any of them)
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=-
c=IN IP4 127.0.0.1
t=0 0
m=audio 5000 RTP/AVP 0
m=audio 4000 RTP/AVP 0
m=audio 3000 RTP/AVP 0
"""
pjsua_args = "--null-audio --auto-answer 200"
extra_headers = ""
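# The regex below requires the 200 response to zero out two of the three
# m=audio lines (port 0) and answer exactly one with a non-zero port.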
include = ["Content-Type: application/sdp", # response must include SDP
"m=audio 0 RTP/AVP[\\s\\S]+m=audio 0 RTP/AVP[\\s\\S]+m=audio [1-9]+[0-9]* RTP/AVP"
]
exclude = []
sendto_cfg = sip.SendtoCfg("Multiple good m=audio lines", pjsua_args, sdp, 200,
extra_headers=extra_headers,
resp_inc=include, resp_exc=exclude)
| wenxinguo/xisip | tests/pjsua/scripts-sendto/125_sdp_with_multi_audio_0.py | Python | gpl-2.0 | 825 |
from lavendeux import Types, Errors
import time
def call(args):
# Check number of arguments
if len(args) != 0:
return (Types.ERROR, Errors.INVALID_ARGS)
return(Types.INT, int(time.time()))
def decorate(value):
return time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(value))
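# Editor's sketch: direct calls for illustration only (dispatch is normally
# handled by Lavendeux itself).
#
#     t_type, t_value = call([])   # -> (Types.INT, <current unix timestamp>)
#     decorate(t_value)            # -> e.g. 'Mon, 01 Jan 2024 00:00:00 +0000'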
def help():
return """A function not taking any arguments, Returns the current unix timestamp.
A decorator that formats a timestamp"""
| rscarson/Lavendeux | bin/extensions/unixtime.py | Python | mit | 426 |
import os
import uuid
from io import BytesIO
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.files.uploadedfile import InMemoryUploadedFile
from django.template import defaultfilters as filters
from .utils import scale_and_crop
from .settings import (
MARKDOWNX_IMAGE_MAX_SIZE,
MARKDOWNX_MEDIA_PATH,
MARKDOWNX_UPLOAD_CONTENT_TYPES,
MARKDOWNX_UPLOAD_MAX_SIZE,
)
class ImageForm(forms.Form):
image = forms.ImageField()
def save(self, commit=True):
img = scale_and_crop(self.files['image'], **MARKDOWNX_IMAGE_MAX_SIZE)
# Image data is binary, so it must be written into a BytesIO buffer.
thumb_io = BytesIO()
img.save(thumb_io, self.files['image'].content_type.split('/')[-1].upper())
file_name = str(self.files['image'])
img = InMemoryUploadedFile(thumb_io, "image", file_name, self.files['image'].content_type, thumb_io.getbuffer().nbytes, None)
unique_file_name = self.get_unique_file_name(file_name)
full_path = os.path.join(settings.MEDIA_ROOT, MARKDOWNX_MEDIA_PATH, unique_file_name)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
destination = open(full_path, 'wb+')
for chunk in img.chunks():
destination.write(chunk)
destination.close()
return os.path.join(settings.MEDIA_URL, MARKDOWNX_MEDIA_PATH, unique_file_name)
def get_unique_file_name(self, filename):
ext = filename.split('.')[-1]
filename = "%s.%s" % (uuid.uuid4(), ext)
return filename
def clean(self):
upload = self.cleaned_data['image']
content_type = upload.content_type
if content_type in MARKDOWNX_UPLOAD_CONTENT_TYPES:
if upload._size > MARKDOWNX_UPLOAD_MAX_SIZE:
raise forms.ValidationError(_('Please keep filesize under %(max)s. Current filesize %(current)s') % {'max':filters.filesizeformat(MARKDOWNX_UPLOAD_MAX_SIZE), 'current':filters.filesizeformat(upload._size)})
else:
raise forms.ValidationError(_('File type is not supported'))
return upload
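# Editor's usage sketch (hypothetical view code):
#
#     form = ImageForm(files=request.FILES)
#     if form.is_valid():
#         image_url = form.save()  # returns the MEDIA_URL path of the saved image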
| melbic/django-markdownx | markdownx/forms.py | Python | gpl-2.0 | 2,143 |
import Util
import time
import unittest
import tAnimator
import selectBrowser
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
# Test the Animator tape deck functionality
class tAnimatorTapeDeck(tAnimator.tAnimator):
def setUp(self):
browser = selectBrowser._getBrowser()
Util.setUp(self, browser)
# Test that the Animator can animate in the forward direction
# Under default settings, it takes roughly 2 seconds for the channel to change by 1
def test_channelAnimatorForwardAnimation(self):
driver = self.driver
# Open a test images
# Note: The test will fail unless the last image loaded has more than one channel
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Record the first channel value of the test image
self._getFirstValue( driver, "Channel" )
print "Testing Channel Animator Forward Animation..."
print "First channel value: 0"
# Click the forward animate button
# Allow the image to animate for 2 seconds
self._animateForward( driver, "Channel" )
time.sleep(2)
# Check that the channel value is greater than the first channel value
currChannelValue = self._getCurrentValue( driver, "Channel" )
print "Current channel", currChannelValue
self.assertGreater( int(currChannelValue), 0, "Channel value did not increase for forward animation.")
self._stopAnimation( driver, "Channel")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Go to the first image and record the first image value
self._getFirstValue( driver, "Image" )
print "Testing Image Animator Forward Animation..."
print "First image value: 0"
# Click the forward animate button
# Allow animation for 2 seconds
self._animateForward( driver, "Image" )
time.sleep(2)
# Check that the image value is greater than the first image value
currImageValue = self._getCurrentValue( driver, "Image" )
print "Current image", currImageValue
self.assertGreater( int(currImageValue), 0, "Image value did not increase for forward animation")
# Test that the Animator can animate in the reverse direction
# Under default settings, it takes roughly 2 seconds for the channel to change by 1
def test_channelAnimatorReverseAnimation(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Record the last channel value of the test image
self._getLastValue( driver, "Channel" )
lastChannelValue = self._getCurrentValue( driver, "Channel" )
print "Testing Channel Animator Reverse Animation..."
print "Last channel value:", lastChannelValue
# Click the reverse animate button. Scroll into view if not visible
# Allow image to animate for 2 seconds
reverseAnimateButton = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "ChannelTapeDeckReversePlay")))
driver.execute_script( "arguments[0].scrollIntoView(true);", reverseAnimateButton)
ActionChains(driver).click( reverseAnimateButton ).perform()
time.sleep(2)
# Check that the channel value is at a value less than the last channel value
currChannelValue = self._getCurrentValue( driver, "Channel" )
print "Current channel", currChannelValue
self.assertLess( int(currChannelValue), int(lastChannelValue), "Channel value did not decrease for reverse animation.")
self._stopAnimation( driver, "Channel")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Go to the last image and record the last image value
self._getLastValue( driver, "Image" )
lastImageValue = self._getCurrentValue( driver, "Image" )
print "Testing Image Animator Reverse Animation..."
print "Last image value:", lastImageValue
# Click the reverse animate button. Scroll into view if not visible
# Allow image to animate for 2 seconds
reverseAnimateButton = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "ImageTapeDeckReversePlay")))
driver.execute_script( "arguments[0].scrollIntoView(true);", reverseAnimateButton)
ActionChains(driver).click( reverseAnimateButton ).perform()
time.sleep(2)
# Check that the image value is at a value less than the last image value
currImageValue = self._getCurrentValue( driver, "Image" )
print "Current image", currImageValue
self.assertLess( int(currImageValue), int(lastImageValue), "Image value did not decrease for reverse animation")
# Test that the Channel Animator can stop the animation
def test_channelAnimatorStopAnimation(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Allow the image to animate for 2 seconds
self._animateForward( driver, "Channel" )
time.sleep(2)
# Click on the Stop button. Scroll into view if not visible
stopButton = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "ChannelTapeDeckStopAnimation")))
driver.execute_script( "arguments[0].scrollIntoView(true);", stopButton)
ActionChains(driver).click( stopButton ).perform()
channelValue = self._getCurrentValue( driver, "Channel" )
# Wait for another 2 seconds. Ensure the channel value did not change
time.sleep(2)
currChannelValue = self._getCurrentValue( driver, "Channel" )
self.assertEqual( int(currChannelValue), int(channelValue), "Channel animation did not stop" )
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Allow animation for 2 seconds
self._animateForward( driver, "Image" )
time.sleep(2)
# Click on the Stop button. Scroll into view if not visible
stopButton = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, "ImageTapeDeckStopAnimation")))
driver.execute_script( "arguments[0].scrollIntoView(true);", stopButton)
ActionChains(driver).click( stopButton ).perform()
imageValue = self._getCurrentValue( driver, "Image" )
# Wait for another 2 seconds. Ensure that the image value did not change
time.sleep(2)
currImageValue = self._getCurrentValue( driver, "Image" )
self.assertEqual( int(currImageValue), int(imageValue), "Image animation did not stop")
# Test that the Channel Animator can go to the first frame value of the test image
def test_channelAnimatorFirstValue(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Record the first channel value of the test image
self._getFirstValue( driver, "Channel" )
firstChannelValue = self._getCurrentValue( driver, "Channel" )
# Allow the image to animate for 2 seconds
# Click the first valid value button
# Check that the channel value is the same as the first channel value
self._animateForward( driver, "Channel" )
time.sleep(2)
self._getFirstValue( driver, "Channel" )
currChannelValue = self._getCurrentValue( driver, "Channel" )
self.assertEqual( int(currChannelValue), int(firstChannelValue), "Channel Animator did not return to first channel value")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Go to the first image and record the first image value
self._getFirstValue( driver, "Image" )
firstImageValue = self._getCurrentValue( driver, "Image" )
# Allow animation for 2 seconds
self._animateForward( driver, "Image" )
time.sleep(2)
# Click the first valid value button
# Check that the image value is the same as the first image value
self._getFirstValue( driver, "Image" )
currImageValue = self._getCurrentValue( driver, "Image" )
self.assertEqual( int(currImageValue), int(firstImageValue), "Image Animator did not return to first image")
# Test that the Channel Animator can go to the last frame value of the test image
def test_channelAnimatorLastValue(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "Default")
# Record the last channel value of the test image
self._getLastValue( driver, "Channel" )
lastChannelValue = self._getCurrentValue( driver, "Channel" )
# Return to the first frame and allow the image to animate for 2 seconds
self._getFirstValue( driver, "Channel" )
self._animateForward( driver, "Channel" )
time.sleep(2)
# Click the last valid value button
# Check that the channel value is the same as the last channel value
self._getLastValue( driver, "Channel" )
currChannelValue = self._getCurrentValue( driver, "Channel" )
self.assertEqual( int(currChannelValue), int(lastChannelValue), "Channel Animator did not return to last channel value")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Go to the last image and record the last image value
self._getLastValue( driver, "Image" )
lastImageValue = self._getCurrentValue( driver, "Image" )
# Allow animation for 2 seconds
self._animateForward( driver, "Image" )
time.sleep(2)
# Click the last valid value button
# Check that the image value is the same as the last image value
self._getLastValue( driver, "Image" )
currImageValue = self._getCurrentValue( driver, "Image" )
self.assertEqual( int(currImageValue), int(lastImageValue), "Image Animator did not return to last image")
# Test that the Channel Animator lower spin box cannot exceed boundary values
def test_animatorBoundary(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Find and record the last valid value of the animation
self._getLastValue( driver, "Channel" )
lastChannelValue = self._getCurrentValue( driver, "Channel" )
# Find and click the lower spin box
lowerBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ChannelLowerBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", lowerBoundText)
lowerBoundText.click()
# Check that the input to the lower spin box cannot be negative
lowerBoundValue = Util._changeElementText( self, driver, lowerBoundText, -10 )
self.assertGreaterEqual( int(lowerBoundValue), 0, "Channel lower bound value is negative")
# Check that the input to the lower spin box cannot be greater than the last channel value
lowerBoundValue = Util._changeElementText( self, driver, lowerBoundText, int(lastChannelValue )+10 )
self.assertLessEqual( int(lowerBoundValue), int(lastChannelValue), "Channel lower bound value is greater than the last channel value")
# Find and click the upper spin box
upperBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ChannelUpperBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", upperBoundText)
upperBoundText.click()
# Check that the input to the upper spin box cannot be greater than the last channel value
upperBoundValue = Util._changeElementText( self, driver, upperBoundText, int(lastChannelValue)+10 )
self.assertLessEqual( int(upperBoundValue), int(lastChannelValue), "Channel upper bound value is greater than the last valid channel value")
# Check that the input to the upper spin box cannot be less than the first channel value
upperBoundValue = Util._changeElementText( self, driver, upperBoundText, -10)
self.assertGreaterEqual( int(upperBoundValue), 0, "Channel Upper bound value is negative")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Record the last image value
self._getLastValue( driver, "Image" )
lastImageValue = self._getCurrentValue( driver, "Image" )
# Find and click the lower spin box
lowerBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ImageLowerBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", lowerBoundText)
lowerBoundText.click()
# Check that the input to the lower spin box cannot be negative
lowerBoundValue = Util._changeElementText( self, driver, lowerBoundText, -10 )
self.assertGreaterEqual( int(lowerBoundValue), 0, "Image lower bound value is negative")
# Check that the input to the lower spin box cannot be greater than the last image value
lowerBoundValue = Util._changeElementText( self, driver, lowerBoundText, int(lastImageValue )+10 )
self.assertLessEqual( int(lowerBoundValue), int(lastImageValue), "Image lower bound value is greater than the last image value")
# Find and click the upper spin box
imageUpperBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ImageUpperBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", imageUpperBoundText)
imageUpperBoundText.click()
# Check that the input to the upper spin box cannot be greater than the last image value
imageUpperBoundValue = Util._changeElementText( self, driver, imageUpperBoundText, int(lastImageValue)+10 )
self.assertLessEqual( int(imageUpperBoundValue), int(lastImageValue), "Image upper bound value is greater than the last image value")
# Check that the input to the upper spin box cannot be less than the first image value
imageUpperBoundValue = Util._changeElementText( self, driver, imageUpperBoundText, -10)
self.assertGreaterEqual( int(imageUpperBoundValue), 0, "Image upper bound value is negative")
# Test that the Channel Animator upper and lower bound values do not change during animation
def test_channelAnimatorBoundaryAnimation(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "Default")
# Find and record the last valid value of the animation
self._getLastValue( driver, "Channel" )
lastChannelValue = self._getCurrentValue( driver, "Channel" )
# Find and click the lower spin box
lowerBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ChannelLowerBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", lowerBoundText)
lowerBoundText.click()
# Change the lower bound value
lowerBoundValue = Util._changeElementText(self, driver, lowerBoundText, 1)
# Find and click the upper spin box
upperBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ChannelUpperBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", upperBoundText)
upperBoundText.click()
# Change the upper bound value
upperBoundValue = Util._changeElementText( self, driver, upperBoundText, int(lastChannelValue)-1)
# Allow test image to animate for 2 seconds
self._animateForward( driver, "Channel" )
time.sleep(2)
# Check that the lower and upper bound values did not change during animation
lowerBound = lowerBoundText.get_attribute("value")
upperBound = upperBoundText.get_attribute("value")
self.assertEqual( int(lowerBound), int(lowerBoundValue), "Lower bound channel value changed during animation")
self.assertEqual( int(upperBound), int(upperBoundValue), "Upper bound channel value changed during animation")
def test_imageAnimatorBoundaryAnimation(self):
driver = self.driver
# Open a test image so we have something to animate
Util.load_image( self, driver, "aH.fits")
Util.load_image( self, driver, "aJ.fits")
Util.load_image( self, driver, "Default")
# Change the Channel Animator to an Image Animator
self.channel_to_image_animator( driver )
# Record the first image value
self._getFirstValue( driver, "Image" )
# Record the last image value
self._getLastValue( driver, "Image" )
lastImageValue = self._getCurrentValue( driver, "Image" )
# Find and click the lower spin box
lowerBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ImageLowerBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", lowerBoundText)
lowerBoundText.click()
# Change the lower bound value
imageLowerBoundValue = Util._changeElementText(self, driver, lowerBoundText, 1)
# Find and click the upper spin box
imageUpperBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ImageUpperBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", imageUpperBoundText)
imageUpperBoundText.click()
# Change the upper bound value
imageUpperBoundValue = Util._changeElementText( self, driver, imageUpperBoundText, int(lastImageValue)-1 )
# Allow animation for 2 seconds
self._animateForward( driver, "Image" )
time.sleep(2)
# Check that the lower and upper bound values did not change during animation
lowerBound = lowerBoundText.get_attribute("value")
upperBound = imageUpperBoundText.get_attribute("value")
self.assertEqual( int(lowerBound), int(imageLowerBoundValue), "Lower bound image value changed during animation")
self.assertEqual( int(upperBound), int(imageUpperBoundValue), "Upper bound image value changed during animation")
def tearDown(self):
# Close the browser
self.driver.close()
# Allow browser to fully close before continuing
time.sleep(2)
# Close the session and delete temporary files
self.driver.quit()
if __name__ == "__main__":
unittest.main()
| pfederl/CARTAvis | carta/html5/common/skel/source/class/skel/simulation/tAnimatorTapeDeck.py | Python | gpl-2.0 | 19,807 |
import os
from setuptools import setup, find_packages
import systemjs
def read_file(name):
with open(os.path.join(os.path.dirname(__file__), name)) as f:
return f.read()
readme = read_file('README.rst')
requirements = [
'django-appconf>=0.6'
]
test_requirements = [
'mock',
'coverage',
]
setup(
name='django-systemjs',
version=systemjs.__version__,
license='MIT',
# Packaging
packages=find_packages(exclude=('tests', 'tests.*', 'examples')),
install_requires=requirements,
include_package_data=True,
extras_require={
'test': test_requirements,
},
tests_require=test_requirements,
test_suite='runtests.runtests',
# PyPI metadata
description='Brings SystemJS to Django staticfiles',
long_description=readme,
author='Sergei Maertens',
author_email='[email protected]',
platforms=['any'],
url='https://github.com/sergei-maertens/django-systemjs',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'Operating System :: Unix',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Application Frameworks'
]
)
| gannetson/django-systemjs | setup.py | Python | mit | 1,414 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import socket to get at constants for socketOptions
import socket
import pprint
# We need to import Axon - Kamaelia's core component system - to write Kamaelia components!
import Axon
# Import the server framework, the HTTP protocol handling, the minimal request handler, and error handlers
from Kamaelia.Chassis.ConnectedServer import SimpleServer
from Kamaelia.Chassis.ConnectedServer import MoreComplexServer
from Kamaelia.Protocol.HTTP.HTTPServer import HTTPServer
Axon.Box.ShowAllTransits = False
# This allows for configuring the request handlers in a nicer way. This is candidate
# for merging into the mainline code. Effectively this is a factory that creates functions
# capable of choosing which request handler to use.
def requestHandlers(URLHandlers, errorpages=None):
if errorpages is None:
import Kamaelia.Protocol.HTTP.ErrorPages as ErrorPages
errorpages = ErrorPages
def createRequestHandler(request):
if request.get("bad"):
return errorpages.websiteErrorPage(400, request.get("errormsg",""))
else:
for (prefix, handler) in URLHandlers:
if request["raw-uri"][:len(prefix)] == prefix:
request["uri-prefix-trigger"] = prefix
request["uri-suffix"] = request["raw-uri"][len(prefix):]
return handler(request)
return errorpages.websiteErrorPage(404, "No resource handlers could be found for the requested URL")
return createRequestHandler
class HelloHandler(Axon.Component.component):
def __init__(self, request):
super(HelloHandler, self).__init__()
self.request = request
def main(self):
resource = {
"statuscode" : "200",
"headers" : [
("content-type", "text/html"),
]
}
self.send(resource, "outbox"); yield 1
page = {
"data" : "<html><body><h1>Hello World</h1><P>Woo!!</body></html>",
}
self.send(page, "outbox"); yield 1
self.send(Axon.Ipc.producerFinished(self), "signal")
yield 1
# ----------------------------------------------------------------------------------------------------
#
# Simple WSGI Handler
#
import time
def simple_app(environ, start_response):
"""Simplest possible application object"""
status = '200 OK'
response_headers = [('Content-type','text/html'),('Pragma','no-cache')]
start_response(status, response_headers)
yield '<P> My Own Hello World!\n'
for i in sorted(environ.keys()):
yield "<li>%s: %s\n" % (i, environ[i])
yield "<li> Date:" + time.ctime()
# ----------------------------------------------------------------------------------------------------
#
# Simple WSGI Handler
#
def HTML_WRAP(app):
"""
Wraps the output of app in HTML
"""
def gen(environ, start_response):
"""The standard WSGI interface"""
yield "<html>\n"
yield "<body>\n"
for i in app(environ, start_response):
yield i
yield "</body>\n"
yield "</html>\n"
return gen
class _WSGIHandler(Axon.ThreadedComponent.threadedcomponent):
"""Choosing to run the WSGI app in a thread rather than the same
context, this means we don't have to worry what they get up
to really"""
def __init__(self, app_name, request, app):
super(_WSGIHandler, self).__init__()
self.app_name = app_name
self.request = request
self.environ = request
self.app = app
def start_response(self, status, response_headers):
self.status = status
self.response_headers = response_headers
def munge_headers(self):
for header in self.environ["headers"]:
cgi_varname = "HTTP_"+header.replace("-","_").upper()
self.environ[cgi_varname] = self.environ["headers"][header]
pprint.pprint(self.environ)
pprint.pprint(self.environ["headers"])
def main(self):
required = "*** REQUIRED FIX THIS ***"
headers = self.environ["headers"]
self.environ["REQUEST_METHOD"] = required # Required
self.environ["SCRIPT_NAME"] = self.app_name # Portion of URL that relates to the application object. May be empty. (eg /cgi-bin/test.pl)
self.environ["PATH_INFO"] = self.environ["uri-suffix"] # Remainder of request path after "SCRIPT_NAME", designating a content path may be empty.
if self.environ["uri-suffix"].find("?") != -1:
self.environ["QUERY_STRING"] = self.environ["uri-suffix"][self.environ["uri-suffix"].find("?")+1:]
else:
self.environ["QUERY_STRING"] = ""
# self.environ["QUERY_STRING"] = required # Portion of request URL that follows the ? - may be empty or absent
self.environ["CONTENT_TYPE"] = headers.get("content-type","") # Contents of an HTTP_CONTENT_TYPE field - may be absent or empty
self.environ["CONTENT_LENGTH"] = headers.get("content-length","") # Contents of an HTTP_CONTENT_LENGTH field - may be absent or empty
self.environ["SERVER_NAME"] = required # Server name published to the outside world
self.environ["SERVER_PORT"] = required # Server port published to the outside world
self.environ["SERVER_PROTOCOL"] = required # Version of protocol client _sent us_ (what they would like back)
consider = " **CONSIDER ADDING THIS -- eg: "
self.environ["SERVER_ADDR"] = consider + "192.168.2.9"
self.environ["HTTP_REFERER"] = consider + "-"
self.environ["SERVER_ADMIN"] = consider + "[no address given]"
self.environ["SERVER_SIGNATURE"] = consider + "...."
self.environ["SERVER_SOFTWARE"] = consider + "Apache/1.3.33 (Darwin)"
self.environ["SCRIPT_FILENAME"] = consider + "/usr/local/httpd/sites/com.thwackety/cgi/test.pl"
self.environ["DOCUMENT_ROOT"] = consider + "/usr/local/httpd/sites/com.thwackety/docs"
self.environ["REQUEST_URI"] = consider + "/cgi-bin/test.pl"
self.environ["SCRIPT_URL"] = consider + "/cgi-bin/test.pl"
self.environ["SCRIPT_URI"] = consider + "http://thwackety.com/cgi-bin/test.pl"
self.environ["REMOTE_ADDR"] = consider + "192.168.2.5"
self.environ["REMOTE_PORT"] = consider + "56669"
self.environ["DATE"] = consider + "Sat Sep 15 15:42:25 2007" #####
self.environ["PATH"] = consider + "/bin:/sbin:/usr/bin:/usr/sbin:/usr/libexec:/System/Library/CoreServices"
self.environ["GATEWAY_INTERFACE"] = consider + "CGI/1.1"
self.munge_headers()
R = [ x for x in self.app(self.environ, self.start_response) ]
resource = {
"statuscode" : self.status,
"headers" : self.response_headers,
}
self.send(resource, "outbox")
for fragment in R:
page = {
"data" : fragment,
}
self.send(page, "outbox")
self.send(Axon.Ipc.producerFinished(self), "signal")
def WSGIHandler(app_name, app):
def R(request):
return _WSGIHandler(app_name, request,app)
return R
def HTTPProtocol():
def foo(self,**argd):
print self.routing
return HTTPServer(requestHandlers(self.routing),**argd)
return foo
# Finally we create the actual server and run it.
class WebServer(MoreComplexServer):
routing = [
["/wsgi", WSGIHandler("/wsgi", HTML_WRAP(simple_app)) ],
]
protocol=HTTPProtocol()
port=8082
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
WebServer().run()
"""
Changed Webserver to use the newer MoreComplexServer:
   * Required change to HTTPServer
* HTTPParser
IPs now in request object passed out for a handler with keys
* peer, peerip
* localip, localport
"""
| sparkslabs/kamaelia | Sketches/MPS/HTTP/KamaeliaWebServer.py | Python | apache-2.0 | 8,676 |
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.xs.StringType import StringType
logger = logging.getLogger(__name__)
class VariableIdPattern(StringType):
# <xsd:pattern value="oval:[A-Za-z0-9_\-\.]+:var:[1-9][0-9]*"/>
def get_value_pattern(self):
return r'oval:[A-Za-z0-9_\-\.]+:var:[1-9][0-9]*'
| cjaymes/pyscap | src/scap/model/oval_5/VariableIdPattern.py | Python | gpl-3.0 | 978 |
from __future__ import absolute_import
from .._hook import import_hook
@import_hook(__name__)
def value_processor(name, raw_name, raw_value):
return raw_value
del import_hook
del value_processor
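# Illustrative usage (added sketch, not part of the original module): with an
# environment variable such as MYAPP_DEBUG set, its raw string value can be
# imported directly, e.g.
#   from envcfg.raw import myapp_debug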
| tonyseek/python-envcfg | envcfg/raw/__init__.py | Python | mit | 204 |
#!/usr/bin/env python
"""
ffmpeg-normalize
ffmpeg / avconv macro for normalizing audio
Audio normalization script, normalizing media files to WAV output
This program normalizes audio to a certain dB level. The default is an RMS-based
normalization where the mean is lifted. Peak normalization is possible with the
-m/--max option. It takes any audio or video file as input, and writes the audio
part as output WAV file.
Usage:
ffmpeg-normalize [options] <input-file>...
Options:
-f --force Force overwriting existing files
-l, --level dB level to normalize to [default: -26]
-p --prefix <prefix> Normalized file prefix [default: 'normalized']
-m --max Normalize to the maximum (peak) volume instead of RMS
-v --verbose Enable verbose output
-n --dry-run Show what would be done, do not convert
-d --debug Show debug output
Examples:
ffmpeg-normalize -v file.mp3
ffmpeg-normalize -v *.avi
"""
#
# The MIT License (MIT)
#
# Copyright (c) 2014 Werner Robitza
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from docopt import docopt
import subprocess
import os
import re
import logging
from . import __version__
logger = logging.getLogger('ffmpeg_normalize')
logger.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.ERROR)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
args = dict()
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, __ = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
FFMPEG_CMD = which('ffmpeg') or which('avconv') or None
if not FFMPEG_CMD:
    raise SystemExit("Could not find ffmpeg or avconv")
if 'avconv' in FFMPEG_CMD:
    NORMALIZE_CMD = which('normalize-audio')
    if not NORMALIZE_CMD:
        raise SystemExit(
            "avconv needs the normalize-audio command:\n"
            "    sudo apt-get install normalize-audio"
        )
def run_command(cmd, raw=False, dry=False):
    # collapse double spaces left over from building the command string
    cmd = cmd.replace("  ", " ")
    cmd = cmd.replace("  ", " ")
logger.debug("[command] {0}".format(cmd))
if dry:
return
if raw:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
else:
p = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode == 0:
return stdout
else:
logger.error("Error running command: {}".format(cmd))
logger.error(str(stderr))
def ffmpeg_get_mean(input_file):
cmd = FFMPEG_CMD + ' -i "' + input_file + '" -filter:a "volumedetect" -vn -sn -f null /dev/null'
output = run_command(cmd, True)
logger.debug(output)
mean_volume_matches = re.findall(r"mean_volume: ([\-\d\.]+) dB", output)
if (mean_volume_matches):
mean_volume = float(mean_volume_matches[0])
else:
logger.error("could not get mean volume for " + input_file)
raise SystemExit
max_volume_matches = re.findall(r"max_volume: ([\-\d\.]+) dB", output)
if (max_volume_matches):
max_volume = float(max_volume_matches[0])
else:
logger.error("could not get max volume for " + input_file)
raise SystemExit
return mean_volume, max_volume
def ffmpeg_adjust_volume(input_file, gain, output):
global args
if not args['--force'] and os.path.exists(output):
logger.warning("output file " + output + " already exists, skipping. Use -f to force overwriting.")
return
cmd = FFMPEG_CMD + ' -y -i "' + input_file + '" -vn -sn -filter:a "volume=' + str(gain) + 'dB" -c:a pcm_s16le "' + output + '"'
output = run_command(cmd, True, args['--dry-run'])
# -------------------------------------------------------------------------------------------------
def main():
global args
args = docopt(__doc__, version=str(__version__), options_first=False)
if args['--debug']:
ch.setLevel(logging.DEBUG)
elif args['--verbose']:
ch.setLevel(logging.INFO)
logger.debug(args)
for input_file in args['<input-file>']:
if not os.path.exists(input_file):
logger.error("file " + input_file + " does not exist")
continue
path, filename = os.path.split(input_file)
basename = os.path.splitext(filename)[0]
output_file = os.path.join(path, args['--prefix'] + "-" + basename + ".wav")
if 'ffmpeg' in FFMPEG_CMD:
logger.info("reading file " + input_file)
mean, maximum = ffmpeg_get_mean(input_file)
logger.warning("mean volume: " + str(mean))
logger.warning("max volume: " + str(maximum))
target_level = float(args['--level'])
if args['--max']:
adjustment = target_level - maximum
else:
adjustment = target_level - mean
logger.warning("file needs " + str(adjustment) + " dB gain to reach " + str(args['--level']) + " dB")
if maximum + adjustment > 0:
logger.warning("adjusting " + input_file + " will lead to clipping of " + str(maximum + adjustment) + "dB")
ffmpeg_adjust_volume(input_file, adjustment, output_file)
else:
# avconv doesn't seem to have a way to measure volume level, so
# instead we use it to convert to wav, then use a separate programme
# and then convert back to the desired format.
# http://askubuntu.com/questions/247961/normalizing-video-volume-using-avconv
cmd = FFMPEG_CMD + ' -i ' + input_file + ' -c:a pcm_s16le -vn "' + output_file + '"'
output = run_command(cmd, True, args['--dry-run'])
cmd = NORMALIZE_CMD + ' "' + output_file + '"'
output = run_command(cmd, True, args['--dry-run'])
logger.info(output)
if __name__ == '__main__':
main()
| mvbattista/audio-normalize | ffmpeg_normalize/__main__.py | Python | mit | 7,614 |
from __future__ import division
from sklearn import mixture, metrics
import pylab as pl
import csv
import numpy as np
def inc_avg(currAvg=None, currNumElem=None, newElem=None):
    if currAvg is not None and currNumElem is not None and newElem is not None:
newAvg = currAvg + (newElem - currAvg) / (currNumElem +1)
return newAvg
else:
raise Exception("inc_avg: something is None")
# reading file
for action in ['move', 'rotate', 'wax', 'fold', 'paint']:
numberOfSlices=0
actionName=action
print "Action: ", actionName
realDataMatrix=[]
partialResults=[]
partialVariances=[]
for a in range(6):
reader=csv.reader(open("/home/smorante/Repositories/cognitive/xgnitive/main/app/record/recorded3/"+actionName+"_"+str(a)+"/data.log","rb"),delimiter=' ')
x=list(reader)
temp0=np.array(x).astype('float')
#gaussians by time
newTimeValue= np.ceil(temp0[-1][1] - temp0[0][1])
numberOfSlices = int(inc_avg(numberOfSlices, a, newTimeValue))
## Get the time range and rescale
r = float(temp0[-1][1] - temp0[0][1])
temp0[:,1] = map(lambda x: (x - temp0[0][1]) / r, temp0[:,1])
# append
realDataMatrix.append(temp0.tolist())
#test all dimensions and sort
Xnoisy = np.vstack(realDataMatrix) # noisy dataset
Xnoisy = sorted(Xnoisy, key=lambda column: column[1])
## deletes first column (only -1 values)
Xnoisy = np.delete(Xnoisy,0,axis=1)
## bad way to delete last 8 columns
for d in range(8):
Xnoisy = np.delete(Xnoisy,9,axis=1)
#assigning new clean dataset to variable X in numpy array
X = np.array(Xnoisy)
## OVERLOAD
for h in range(numberOfSlices):
#print 'h', h
#print X.shape[0]
initial=(X.shape[0]/numberOfSlices)*h
final=(X.shape[0]/numberOfSlices)*(h+1)
#print X[initial:final].shape
best_gmm = mixture.GMM(n_components=1, covariance_type='full')
best_gmm.fit(X[initial:final])
#print "Best : ", best_gmm
#print "Best mean: ", best_gmm.means_.ravel().tolist()
partialResults.append(best_gmm.means_.ravel().tolist())
partialVariances.append(best_gmm.covars_.ravel().tolist())
print 'slices: ', len(partialResults)
print 'variances: ', partialVariances
# saving centers
#sortedPoints = sorted(partialResults, key=lambda point: point[0])
#np.savetxt("generalized/"+actionName+"Generalized", sortedPoints, fmt='%.14e')
| smorante/continuous-goal-directed-actions | simulated-CGDA/generalization/generalization_test2.py | Python | mit | 2,527 |
from lib.actions import BaseAction
__all__ = [
'CreatePoolMemberAction'
]
class CreatePoolMemberAction(BaseAction):
api_type = 'loadbalancer'
def run(self, region, pool_id, node_id, port):
driver = self._get_lb_driver(region)
pool = driver.ex_get_pool(pool_id)
node = driver.ex_get_node(node_id)
member = driver.ex_create_pool_member(pool, node, port)
return self.resultsets.formatter(member)
| armab/st2contrib | packs/dimensiondata/actions/create_pool_member.py | Python | apache-2.0 | 449 |
"""
Copyright (C) 2009 Hiroaki Kawai <[email protected]>
"""
_base32 = "0123456789bcdefghjkmnpqrstuvwxyz"
_base32_map = {_base32[i]: i for i in range(len(_base32))}
LONG_ZERO = 0
def _float_hex_to_int(f):
if f < -1.0 or f >= 1.0:
return None
if f == 0.0:
return 1, 1
h = f.hex()
x = h.find("0x1.")
assert x >= 0
p = h.find("p")
assert p > 0
half_len = len(h[x + 4:p]) * 4 - int(h[p + 1:])
if x == 0:
r = (1 << half_len) + ((1 << (len(h[x + 4:p]) * 4)) + int(h[x + 4:p], 16))
else:
r = (1 << half_len) - ((1 << (len(h[x + 4:p]) * 4)) + int(h[x + 4:p], 16))
return r, half_len + 1
def _encode_i2c(lat, lon, lat_length, lon_length):
precision = int((lat_length + lon_length) / 5)
if lat_length < lon_length:
a = lon
b = lat
else:
a = lat
b = lon
boost = (0, 1, 4, 5, 16, 17, 20, 21)
ret = ""
for _ in range(precision):
ret += _base32[(boost[a & 7] + (boost[b & 3] << 1)) & 0x1F]
t = a >> 3
a = b >> 2
b = t
return ret[::-1]
def encode(latitude, longitude, precision=12):
if latitude >= 90.0 or latitude < -90.0:
raise Exception("invalid latitude.")
while longitude < -180.0:
longitude += 360.0
while longitude >= 180.0:
longitude -= 360.0
xprecision = precision + 1
lat_length = lon_length = int(xprecision * 5 / 2)
if xprecision % 2 == 1:
lon_length += 1
a = _float_hex_to_int(latitude / 90.0)
o = _float_hex_to_int(longitude / 180.0)
if a[1] > lat_length:
ai = a[0] >> (a[1] - lat_length)
else:
ai = a[0] << (lat_length - a[1])
if o[1] > lon_length:
oi = o[0] >> (o[1] - lon_length)
else:
oi = o[0] << (lon_length - o[1])
return _encode_i2c(ai, oi, lat_length, lon_length)[:precision]
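# Quick sanity check (added sketch, not part of the original module): encoding
# the canonical Wikipedia coordinates at precision 11 should round-trip to the
# well-known hash:
#   encode(57.64911, 10.40744, 11)  ->  'u4pruydqqvj'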
| aiven/journalpump | journalpump/geohash.py | Python | apache-2.0 | 1,883 |
# utility, wrappers, convenience and helper functions
# some are dna related
import random
from sys import *
import os, logging
import re
import shutil, tarfile
import math, socket
import urllib
import urllib2
from math import *
from ftplib import FTP
import unicodedata
import array, copy, posixpath
from posixpath import curdir, sep, pardir, join
try:
import simplexmlparse
except:
pass
try:
import dist # binomial functions from jacques van helden's lab
except:
pass
try:
import warnings
warnings.filterwarnings("ignore",category=DeprecationWarning)
import scipy.stats
import scipy
except:
pass
try:
import MySQLdb
except:
pass
# for compat with mac os tiger and redhat 8, uncomment the following two lines
# import sets
# set = sets.Set
# max' libs
import util
LOGLEVEL=0 # the lower the more messages you'll see
# empty class, you can add attributes as you like during run-time, you can also print it
class Object:
def __repr__(self):
lines = []
for i in self.__dict__:
lines.append("%s: "%i+str(self.__dict__[i]))
return "\n".join(lines)
# ----------- CONVENIENCE ------------------
def relpath(path, start=curdir):
"""Return a relative version of a path, backport to python2.4 from 2.6"""
"""http://www.saltycrane.com/blog/2010/03/ospathrelpath-source-code-python-25/ """
if not path:
raise ValueError("no path specified")
start_list = posixpath.abspath(start).split(sep)
path_list = posixpath.abspath(path).split(sep)
# Work out how much of the filepath is shared by start and path.
i = len(posixpath.commonprefix([start_list, path_list]))
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return curdir
return join(*rel_list)
def extractTar(self, path=".", members=None):
""" backport from 2.5 for earlier python versions"""
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 0700
self.extract(tarinfo, path)
# Reverse sort directories.
directories.sort(lambda a, b: cmp(a.name, b.name))
directories.reverse()
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
def revComp(seq):
table = { "a":"t", "A":"T", "t" :"a", "T":"A", "c":"g", "C":"G", "g":"c", "G":"C", "N":"N", "n":"n",
"Y":"R", "R" : "Y", "M" : "K", "K" : "M", "W":"W", "S":"S",
"H":"D", "B":"V", "V":"B", "D":"H", "y":"r", "r":"y","m":"k",
"k":"m","w":"w","s":"s","h":"d","b":"v","d":"h","v":"b","y":"r","r":"y" }
newseq = []
for nucl in reversed(seq):
newseq += table[nucl]
return "".join(newseq)
def resolveIupac(seq):
seq=seq.upper()
table = { "Y" : "TC", "R" : "GA", "M" : "AC", "K" : "GT", "S" : "GC", "W" : "AT", "H" : "ACT", "B" : "GTC", "V" : "GCA", "D" : "GAT", "N" : "ACTG"}
newseq = []
for nucl in seq:
if nucl in table:
newseq += "[%s]" % table[nucl]
else:
newseq += nucl
newstr = "".join(newseq)
#newstr = newstr.replace("N", "[ACTGN]")
return newstr
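# Illustrative example (added sketch): the IUPAC ambiguity code W (A or T)
# becomes a regex character class usable with re.search:
#   resolveIupac("cwg")  ->  "C[AT]G"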
def safeGet(dict, key):
if key in dict:
return dict[key]
else:
log(1, "could not find key %s in dictionary" % key)
return None
class Logger:
def __init__(self, baseName=None, baseDir="log", prefix="", prefixWithHost=False):
if prefixWithHost:
prefix = socket.gethostname()+"."+prefix
if baseName=="stderr":
self.of = stderr
        elif baseName==None:
            fname = prefix + os.path.splitext(os.path.split(argv[0])[1])[0] + ".log"
            if not os.path.isdir(baseDir):
                # no log dir: fall back to stderr instead of failing on open()
                stderr.write("info: no log dir found, logging to stderr\n")
                self.of = stderr
            else:
                logfname = os.path.join(baseDir, fname)
                if os.path.exists(logfname):
                    os.rename(logfname, logfname+".old")
                self.of = open(logfname, "w")
else:
self.of = open(baseName, "w")
def log(self, line, toStderr=False, onlyStderr=False):
if not onlyStderr:
self.of.write(line+"\n")
if toStderr or onlyStderr:
stderr.write(line+"\n")
def log(level, text):
# 0 = never suppress
# 1 = warning
# 2 = info
# 3 = debug
prefix = ""
if level >= LOGLEVEL:
if level == 1:
prefix = "warning:"
        if level == 2:
            prefix = "info:"
        if level >= 3:
            prefix = "debug:"
stderr.write(prefix+"%s\n" % text)
def error(text):
stderr.write("error: %s\n" % text)
exit(1)
def execCmdLine(cmdLine, progName=""):
logging.debug("Running %s" %cmdLine)
ret = os.system(cmdLine)
if ret==None or ret!=0:
logging.error("error while running this command: "+cmdLine)
exit(1)
def highlightOccurence(string, searchStr):
pos = string.lower().find(searchStr.lower())
endPos = pos+len(searchStr)
if pos==-1:
return None
tag = "--"
string = string[:pos] + tag + string[pos:endPos] + tag + string[endPos:]
return string
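# Illustrative example (added sketch): the case-insensitive match is wrapped
# in "--" markers:
#   highlightOccurence("Hello World", "world")  ->  "Hello --World--"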
def readAniseed(asAnnot, tissueKeywords, bgGenesFile=None):
# read annotation, tissueKeywords is list of keywords that have to be found for a gene
# asAnnot is dict of lists, as returned by slurpdictlist
targetGenes = set()
bgGenes = set()
asBgGenes = set()
for annot, genes in asAnnot.iteritems():
asBgGenes.update(genes)
for kw in tissueKeywords:
if annot.find(kw)!=-1:
targetGenes.update(genes)
bgGenes.update(genes)
#stderr.write("Found %d target genes in file %s\n" % (len(targetGenes), asFile))
if bgGenesFile:
bgGenes = set(tabfile.slurplist(bgGenesFile, field=0))
#stderr.write("Found %d background genes in file %s\n" % (len(bgGenes), bgGenesFile))
else:
bgGenes=asBgGenes
return targetGenes, bgGenes
# ------------- HTML STUFF ----------------------------------
def parseXml(template, string):
parser = simplexmlparse.SimpleXMLParser( template )
obj = parser.parse(string)
return obj
def parseXmlFile(template, fname):
xml = open(fname).read()
return parseXml(template, xml)
def parseXmlUrl(template, url):
xml = util.httpGet(url).read()
return parseXml(template, xml)
def openFtpConn(host):
""" returns ftp conection object """
# format for ftp_proxy http://updateproxy.manchester.ac.uk:3128
ftpProxyString=os.environ.get("ftp_proxy")
if ftpProxyString==None:
ftp = FTP() # connect to host, default port
ftp.connect(host)
ftp.login() # user anonymous, passwd anonymous@
else:
ftp = FTP()
port = int(ftpProxyString.split(":")[2])
proxyHost = ftpProxyString.split("//")[1].split(":")[0]
print "using proxy %s, port %d" % (proxyHost, port)
print "connecting to host %s" % (host)
ftp.connect(proxyHost, port, 5)
ftp.login("anonymous@%s" % host, "[email protected]")
print "ok"
return ftp
def getFtpDir(ftp, dir, onlySubdirs=False):
""" return urls of directories in ftp-folder, needs a ftp connection object"""
#print dir
try:
ftp.cwd(dir)
except:
print ("error: directory %s does not seemt to exist on host %s" % (dir, ftp.host))
return None
lines = []
dirs = []
ftp.retrlines('LIST', lines.append) # list directory contents
for l in lines:
if onlySubdirs and not l.startswith("d"):
continue
fs = l.split()
subdir = fs[8]
dirs.append(os.path.join(dir, subdir))
return dirs
# -- for httpGet, helper class for redirects ---
class SmartRedirectHandler(urllib2.HTTPRedirectHandler):
def http_error_301(self, req, fp, code, msg, headers):
result = urllib2.HTTPRedirectHandler.http_error_301(
self, req, fp, code, msg, headers)
result.status = code
return result
def http_error_302(self, req, fp, code, msg, headers):
result = urllib2.HTTPRedirectHandler.http_error_302(
self, req, fp, code, msg, headers)
result.status = code
return result
def httpGet(url):
req = urllib2.Request(url)
opener = urllib2.build_opener(SmartRedirectHandler())
req.add_header('User-Agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.8) Gecko/20050524 Fedora/1.5 Firefox/1.5')
#html = urllib2.urlopen(req)
f = opener.open(req)
return f
def httpMatches(url, reStr):
""" return matches for given regex, use () around the part that you want to extract """
html = httpGet(url)
html= html.readlines()
regex = re.compile(reStr)
matches = []
for l in html:
matches.extend(regex.findall(l))
return matches
def httpDownload(url, fname, verbose=False):
if verbose:
print("Downloading %s to %s" % (url, fname))
fh = httpGet(url)
open(fname, "wb").write(fh.read())
def htmlHeader(fh, title):
fh.write("<html>\n<head>\n")
fh.write("<title>"+title+"</title>\n</head>")
fh.write("<body>\n")
def htmlFooter(fh):
if fh!=None:
fh.write("</body>\n")
fh.write("</html>")
def htmlH3(fh, str):
    if fh!=None:
        fh.write("<h3>" + str + "</h3>\n")
def htmlLink(fh, text, url):
if fh!=None:
fh.write("<a href=\"" + url + "\">" + text + "</a>\n")
def htmlAnchor(fh, name):
if fh!=None:
fh.write("<a name=\"" + name + "\">\n")
def htmlPre(fh, text):
if fh!=None:
fh.write("<pre>\n" + text + "</pre>\n")
def htmlH4(fh, text):
if fh!=None:
fh.write("<h4>\n" + text + "</h4>\n")
def htmlH3(fh, text):
if fh!=None:
fh.write("<h3>\n" + text + "</h3>\n")
def htmlRuler(fh):
if fh!=None:
fh.write("<hr>\n")
# ---------- MYSQL STUFF --------------------------------
def dbConnect(user, pwd, db, port=3306, host="localhost"):
db = MySQLdb.connect(host, user, pwd, db, port=port)
return db
def sql(db, sql, fields=None):
cursor = db.cursor()
if fields:
cursor.execute(sql, fields)
else:
cursor.execute(sql)
rows = cursor.fetchall()
# check if any column is an array (bug in mysqldb 1.2.1 but not in 1.2.2)
if len(rows)>0:
arrayType = type(array.array('c', "a"))
needConvert = False
row1 = rows[0]
for d in row1:
if type(d)==arrayType:
needConvert=True
break
if needConvert:
newRows = []
for row in rows:
newCols = []
for col in row:
if type(col)==arrayType:
newCols.append(col.tostring())
else:
newCols.append(col)
newRows.append(newCols)
rows = newRows
# end bugfix
cursor.close ()
db.commit()
return rows
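# Illustrative usage (added sketch; table and column names are hypothetical):
#   db = dbConnect("user", "secret", "testdb")
#   rows = sql(db, "SELECT id, name FROM genes WHERE chrom=%s", ("chr1",))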
# ------------- ALIGNING/FILTERING SEQUENCES (Wrappers, more or less) --------------------------------
def getPrimers(fileSuffix, faSeqString, maxProductSize, optSize, targetStart, targetEnd, minTm, oTm, maxTm, minGc, oGc, maxGc):
""" return primers from primer3 in format [primer1, primer2] where primer1/2 = (tm, gc, seq) (tm, gc,seq) """
seqfile = TEMPDIR + fileSuffix + ".fa"
f = open(seqfile , "w")
f.write(faSeqString)
f.close()
tmpFile = TEMPDIR + fileSuffix + ".primer3"
cmdLine = "eprimer3 %s -firstbaseindex 0 -productosize %d -target %d,%d -mintm %d -otm %d -maxtm %d -mingc %d -ogcpercent %d -maxgc %d -productsizerange 50-%d -out %s" % (seqfile, optSize, targetStart, targetEnd, minTm, oTm, maxTm, minGc, oGc, maxGc, maxProductSize, tmpFile)
execCmdLine(cmdLine, "primer3")
f = open(tmpFile, "r")
primers = []
for l in f:
l = l.strip()
fs = l.split()
if len(fs)>5 and fs[1]=="PRIMER":
tm = fs[4]
gc = fs[5]
seq = fs[6]
if fs[0]=="FORWARD":
curPrimer = [ (seq,tm,gc)]
else:
curPrimer.append( (seq, tm, gc) )
primers.append(curPrimer)
f.close()
return primers
# ------------------- SOME BASIC STATISTICS STUFF ---------------------
class Stats:
pass
def __repr__(self):
lines = []
lines.append("TP %f" % self.TP)
lines.append("FP %f" % self.FP)
lines.append("TN %f" % self.TN)
lines.append("FN %f" % self.FN)
lines.append("Sens %f" % self.Sens)
lines.append("Spec %f" % self.Spec)
lines.append("PPV %f" % self.PPV)
lines.append("CC %f" % self.CC)
return "\n".join(lines)
def hitStats(all, predicts, targets, notTargets=None, notPredicts=None):
#stats = trueFalsePositives(all, predicts, targets)
def divide(top, bottom):
if bottom!=0:
return float(top) / float(bottom)
else:
return 0.0
if notPredicts==None:
notPredicts = all.difference(predicts)
if notTargets==None:
notTargets = all.difference(targets)
#assert(len(notTargets)!=0)
assert(len(targets)!=0)
stats = Stats()
TP = len(targets.intersection(predicts))
FP = len(notTargets.intersection(predicts))
TN = len(notTargets.intersection(notPredicts))
FN = len(targets.intersection(notPredicts))
#TP = float(TP)
#FP = float(FP)
#TN = float(TN)
#FN = float(FN)
stats.TP = TP
stats.FP = FP
stats.TN = TN
stats.FN = FN
stats.Sens = divide(TP , (TP + FN))
stats.Spec = divide(TN , (TN + FP))
stats.PPV = divide(TP , (TP + FP)) # PRECISION aka Positive Predictive Value
# Precision measures the proportion of the claimed true functional sites that are indeed true functional sites.
stats.PC = divide(TP , (TP + FP) )
# Accuracy measures the proportion of predictions, both for true functional sites and false functional sites that are correct.
stats.Acc = divide((TP + TN) , (TP + FP + FN + TN))
CC_top = TP * TN - FN * FP
CC_bottom = math.sqrt((TP+FN)*(TN+FP)*(TP+FP)*(TN+FN))
stats.CC = divide( CC_top , CC_bottom )
return stats
# Binomial coefficients.
def choose(n, k):
"""binc(n, k): Computes n choose k."""
if (k > n): return 0
if (k < 0): return 0
if (k > int(n/2)):
k = n - k
rv = 1
for j in range(0, k):
rv *= n - j
rv /= j + 1
return int(rv)
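# Illustrative example (added sketch): there are 10 ways to pick 2 items
# from 5:
#   choose(5, 2)  ->  10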
def hypergProb(k, N, m, n):
""" Wikipedia: There is a shipment of N objects in which m are defective. The hypergeometric distribution describes the probability that in a sample of n distinctive objects drawn from the shipment exactly k objects are defective. """
#return float(choose(m, k) * choose(N-m, n-k)) / choose(N, n)
hp = float(scipy.comb(m, k) * scipy.comb(N-m, n-k)) / scipy.comb(N, n)
if scipy.isnan(hp):
stderr.write("error: not possible to calculate hyperg probability in util.py for k=%d, N=%d, m=%d, n=%d\n" %(k, N, m,n))
stdout.write("error: not possible to calculate hyperg probability in util.py for k=%d, N=%d, m=%d, n=%d\n" %(k, N, m,n))
return hp
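# Worked example (added sketch): N=5 objects, m=2 defective, sample n=2; the
# chance of exactly k=1 defective is C(2,1)*C(3,1)/C(5,2) = 6/10:
#   hypergProb(1, 5, 2, 2)  ->  0.6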
# --------------------------------------------------------------------------------------------------------------------
def hypergProbSum(k, N, m, n):
""" calculate hypergeometric probability from 0 up to a certain value k, k IS NOT INCLUDED!!"""
""" result can be compared with R: sum(dhyper(x=42:1518, m=129, n=(1518-129), k=125)) """
""" if 1518 genes, 129 in foreground, 125 predicted genes and overlap of 42 """
""" in R this is 2.204418e-17"""
sum=0.0
for i in range(0, k):
sum += hypergProb(i, N, m, n)
return sum
def factorial(n, _known=[1]):
assert isinstance(n, int), "Need an integer. This isn't a gamma"
    assert n >= 0, "Sorry, can't factorialize a negative"
assert n < 1000, "No way! That's too large"
try:
return _known[n]
except IndexError:
pass
for i in range(len(_known), n+1):
_known.append(_known[-1] * i)
return _known[n]
def poissProb(n, p, k):
""" binomial probability: n number of objects, p = probability of success, k = number of trials """
l = n*p
#stderr.write("poissProb: n=%d, p=%f, k=%d\n" % (n,p,k))
return (exp(-l)*l**k) / factorial(k)
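# Worked example (added sketch): with n=100 and p=0.02 the rate is l=2, so
# P(k=2) = exp(-2) * 2**2 / 2!:
#   poissProb(100, 0.02, 2)  ->  ~0.2707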
def binProb(n, p, k):
""" binomial probability: n number of objects, p = probability of success, k = number of trials """
#return choose(n, k) * p**k * (1-p)**(n-k)
return scipy.stats.distributions.binom.pmf(k-1, n, p)
def binProbGt(k, size, prob):
""" binomial probability that x is > k (up to n). The corresponding R code for this is: pbinom(k, size = n, prob = p, lower=F) """
# -- manually, not exact enough
#sum = 0.0
#for i in range(0, k):
#sum+=binProb(n, p, i)
#return sum
# -- using scipy, not exact enough:
# 1.0 - cdf is not as exact as sf
#return 1.0 - scipy.stats.distributions.binom.cdf(k-1, n, p)
# scipy is too complicated to compile on the cluster
#return scipy.stats.distributions.binom.sf(k, size, prob)
return dist.pbinom(k, size,prob)
def poissProbSum(n, p, k):
""" poisson probability from 0 to k, k is NOT INCLUDED!"""
sum = 0.0
for i in range(0, k):
sum+=poissProb(n, p, i)
return sum
def poissProbSum_scipy(n, p, k):
""" poisson probability from 0 to k, k is NOT INCLUDED!"""
m = n*p
sum=scipy.special.pdtr(k-1, m)
return sum
def statsAddPVal(stats,flankingTargetGenes, flankingAnnotatedGenes, geneHasTargetAnnot, geneHasAnnot,noHyperg=False, geneScores=None):
assert(False)
# probab to find target in allannotated
m = len(geneHasTargetAnnot)
N = len(geneHasAnnot)
# probab to find target in flankingAnnotated
k = len(flankingTargetGenes)
n = len(flankingAnnotatedGenes)
if noHyperg:
pVal_hgm=0
else:
pVal_hgm = 1.0 - util.hypergProbSum(k, N, m, n)
stats.hypergPval = float(pVal_hgm)
stats.hypergParams = {'N': N, 'm' : m, 'n' : n, 'k' : k, 'pVal' : pVal_hgm }
if N!=0:
p = float(m)/N
else:
p = 0.0
#pVal_bp = 1.0 - util.binProbSum(n, p, k)
pVal_bp = util.binProbGt(k, size=n, prob=p)
stats.bnpPval = pVal_bp
stats.bnpParams = {'n': n, 'k' : k, 'pVal' : pVal_bp, 'p' : p}
pVal_poiss = 1.0 - util.poissProbSum(n, p, k)
stats.pVal_poisson = pVal_poiss
stats.poissParams = {'lambda' : n*p, 'n': n, 'k' : k, 'pVal' : pVal_poiss, 'p' : p}
# corrected binom. probab., using relation target CNS len / all CNS len as p
if geneScores:
targetScore = sum([geneScores.get(g,0) for g in geneHasTargetAnnot])
annotScore = sum([geneScores.get(g,0) for g in geneHasAnnot])
flankAnnotScore = sum([geneScores.get(g,0) for g in flankingAnnotatedGenes])
flankTargetScore = sum([geneScores.get(g,0) for g in flankingTargetGenes])
avg_All_Score = float(annotScore)/ N
avg_Trg_Score = float(targetScore)/ m
corrFactor = (avg_Trg_Score / (avg_All_Score+1))
corr_p = corrFactor * p
#corr_pVal_bp = 1.0 - util.binProbSum(n, corr_p, k)
#corr_pVal_bp = 9999.0
corr_pVal_bp = util.binProbGt(k, size=n, prob=corr_p)
stats.corr_bnpPval = corr_pVal_bp
stats.corr_bnpParams = {'consTarget': targetScore, 'consAnnot' : annotScore, 'consFlankAnnot' : flankAnnotScore, 'consFlankTarget' : flankTargetScore, 'avgConsTarget' : avg_Trg_Score, 'avgConsAnnot' : avg_All_Score,'n': n, 'k' : k, 'pVal' : corr_pVal_bp, 'p' : corr_p, 'corrFactor' : corrFactor}
return stats
def hitStatsWithPVal(self,predictedGenes, geneHasTargetAnnot, geneHasAnnot, noHyperg=False, geneScores=None):
""" get stats given a set of predicted genes + calc pvalues """
assert(False)
stats = hitStats(geneHasAnnot, predictedGenes, geneHasTargetAnnot)
flankingAnnotatedGenes = predictedGenes.intersection(geneHasAnnot)
flankingTargetGenes = predictedGenes.intersection(geneHasTargetAnnot)
statsAddPVal(stats, flankingTargetGenes, flankingAnnotatedGenes, geneHasTargetAnnot, geneHasAnnot, noHyperg, geneScores=None)
return stats
def resolveIupac(seq):
""" convert iupac string to regex """
#table = { "Y" : "TCY", "R" : "GAR", "M" : "ACM", "K" : "GTK", "S" : "GCS", "W" : "ATW", "H" : "ACTHYKW", "B" : "GTCBKYS", "V" : "GCAVSR", "D" : "GATDRWK", "N" : "ACTGNYRMKWSHBVD"}
table = { "Y" : "TC", "R" : "GA", "M" : "AC", "K" : "GT", "S" : "GC", "W" : "AT", "H" : "ACT", "B" : "GTC", "V" : "GCA", "D" : "GAT", "N" : "ACTG"}
newseq = []
for nucl in seq:
if nucl in table:
newseq += "[%s]" % table[nucl]
else:
newseq += nucl
newstr = "".join(newseq)
#newstr = newstr.replace("N", "[ACTGN]")
return newstr
# copied from http://python.genedrift.org/code/dnatranslate.py
def translate_dna(sequence):
#dictionary with the genetic code
# modified max: accomodate CTN/CCN-codes, same as in ensembl code
gencode = {
'ATA':'I', 'ATC':'I', 'ATT':'I', 'ATG':'M',
'ACA':'T', 'ACC':'T', 'ACG':'T', 'ACT':'T',
'AAC':'N', 'AAT':'N', 'AAA':'K', 'AAG':'K',
'AGC':'S', 'AGT':'S', 'AGA':'R', 'AGG':'R',
'CTA':'L', 'CTC':'L', 'CTG':'L', 'CTT':'L',
'CTN':'L',
'CCA':'P', 'CCC':'P', 'CCG':'P', 'CCT':'P',
'CCN':'P',
'CAC':'H', 'CAT':'H', 'CAA':'Q', 'CAG':'Q',
'CGA':'R', 'CGC':'R', 'CGG':'R', 'CGT':'R',
'CGN':'R',
'GTA':'V', 'GTC':'V', 'GTG':'V', 'GTT':'V',
'GTN':'V',
'GCA':'A', 'GCC':'A', 'GCG':'A', 'GCT':'A',
'GCN':'A',
'GAC':'D', 'GAT':'D', 'GAA':'E', 'GAG':'E',
'GGA':'G', 'GGC':'G', 'GGG':'G', 'GGT':'G',
'GGN':'G',
'TCA':'S', 'TCC':'S', 'TCG':'S', 'TCT':'S',
'TCN':'S',
'TTC':'F', 'TTT':'F', 'TTA':'L', 'TTG':'L',
'TAC':'Y', 'TAT':'Y', 'TAA':'_', 'TAG':'_',
'TGC':'C', 'TGT':'C', 'TGA':'_', 'TGG':'W',
}
proteinseq = ''
#loop to read DNA sequence in codons, 3 nucleotides at a time
for n in range(0,len(sequence),3):
#checking to see if the dictionary has the key
        if sequence[n:n+3] in gencode:
proteinseq += gencode[sequence[n:n+3]]
else:
proteinseq += "X" # modif max: to make it the same as ensembl
#return protein sequence
return proteinseq
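# Illustrative example (added sketch): a start codon, one phenylalanine and a
# stop codon translate to:
#   translate_dna("ATGTTTTAA")  ->  "MF_"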
def stripUtrs(cdnaSeq, pepSeq):
""" using peptide sequence, remove utrs from cdna sequence: translate in all 3 frames, search peptide, remove flanking parts """
pepSeqFrames = []
pepSeqFrames.append(translate_dna(cdnaSeq))
pepSeqFrames.append(translate_dna(cdnaSeq[1:]))
pepSeqFrames.append(translate_dna(cdnaSeq[2:]))
uPepSeq = pepSeq.replace("-","")
pepRe = re.compile(uPepSeq)
frame=0
for trans in pepSeqFrames:
#print "frame=",frame
#print "cdna :", cdnaSeq
#print "trans :", "--".join(trans)
#print "orig :", "--".join(uPepSeq)
#print "found :", trans.find(uPepSeq)
match = pepRe.search(trans)
if match!=None:
start = match.start()
end = match.end()
#print start,end
return cdnaSeq[start*3+frame:end*3+frame]
frame+=1
def findSubdirFiles(baseDir, extension):
""" Generator: traverse a baseDir and all subdirectories to find all files with a certain extension, extension is dot plus the extension, like ".xml" """
#result = []
for root, dirs, files in os.walk(baseDir):
for f in files:
if extension==None or os.path.splitext(f)[1]==extension:
path = os.path.join(root, f)
yield path
def baseNFill(num, base, numerals, length):
""" like baseN, but will fill up with the first symbol (=0) up to length """
text = baseN(num, base, numerals)
while len(text) < length:
text = numerals[0]+text
return text
def baseN(num, base=49, numerals="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"):
"""
Convert any int to base/radix 2-36 string. Special numerals can be used
to convert to any base or radix you need. This function is essentially
an inverse int(s, base).
For example:
>>> baseN(-13, 4)
'-31'
>>> baseN(91321, 2)
'10110010010111001'
>>> baseN(791321, 36)
'gyl5'
>>> baseN(91321, 2, 'ab')
'babbaabaababbbaab'
"""
if num == 0:
return numerals[0]
if num < 0:
return '-' + baseN((-1) * num, base, numerals)
if not 2 <= base <= len(numerals):
raise ValueError('Base must be between 2-%d' % len(numerals))
left_digits = num // base
if left_digits == 0:
return numerals[num % base]
else:
return baseN(left_digits, base, numerals) + numerals[num % base]
def remove_accents(str):
""" remove accents from unicode string and return as ascii, replace with non-accented similar ascii characters """
nkfd_form = unicodedata.normalize('NFKD', unicode(str))
return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
def removeAccents(unicodeString):
""" remove accents from unicode string and return as ascii, replace with non-accented similar ascii characters """
nkfd_form = unicodedata.normalize('NFKD', unicodeString) # replace accents
cleanStr = u"".join([c for c in nkfd_form if not unicodedata.combining(c)]) # remove diacritics
cleanStr = u"".join([c for c in cleanStr if ord(c) < 128]) # remove diacritics
return cleanStr.decode("ascii")
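# Illustrative example (added sketch, assuming the Python 2 unicode semantics
# used throughout this module); \xe9 is an accented e:
#   removeAccents(u"caf\xe9")  ->  u"cafe"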
def makeDirs(dir):
""" if it does not exist, create it. expand unix spec chars """
dir = os.path.expanduser(dir)
if not os.path.isdir(dir):
#path=shell.SHGetFolderPath(0, shellcon.CSIDL_PERSONAL, None, 0)
log("creating %s" % dir)
os.makedirs(dir)
return dir
def incCounts(counts, maxCounts):
    """
    increase counts by one position, odometer-style, up to maxCounts;
    returns None once every position has wrapped around
    """
    colCount = len(maxCounts)
    for pos in range(0, colCount):
        if counts[pos] < maxCounts[pos]-1:
            counts[pos] += 1
            return counts
        else:
            counts[pos] = 0
    return None
def iterCounts(maxCounts):
"""
given a list of i maximums, iterate over all possible
values for each position of this list, from 0 to max(i)
"""
    counts = [0] * len(maxCounts)
while True:
yield counts
counts = incCounts(counts, maxCounts)
if counts==None:
break
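# Illustrative example (added sketch): iterCounts behaves like an odometer
# over the given maximums; it yields the same mutated list each time, so copy
# the values if you need to keep them:
#   [list(c) for c in iterCounts([2, 2])]  ->  [[0, 0], [1, 0], [0, 1], [1, 1]]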
| maximilianh/maxtools | lib/util.py | Python | gpl-2.0 | 27,667 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import mock
from exam import fixture
from sentry.testutils import TestCase
from sentry.interfaces.user import User
from sentry.models import Event
class UserTest(TestCase):
@fixture
def event(self):
return mock.Mock(spec=Event())
@fixture
def interface(self):
return User.to_python(dict(
id=1,
email='[email protected]',
favorite_color='brown',
))
def test_path(self):
assert self.interface.get_path() == 'sentry.interfaces.User'
def test_serialize_behavior(self):
assert self.interface.to_json() == {
'id': '1',
'email': '[email protected]',
'data': {
'favorite_color': 'brown'
}
}
def test_invalid_ip_address(self):
with self.assertRaises(Exception):
User.to_python(dict(
ip_address='abc',
))
def test_invalid_email_address(self):
with self.assertRaises(Exception):
User.to_python(dict(
email=1,
))
with self.assertRaises(Exception):
User.to_python(dict(
email='foo',
))
def test_serialize_unserialize_behavior(self):
result = type(self.interface).to_python(self.interface.to_json())
assert result.to_json() == self.interface.to_json()
| jean/sentry | tests/sentry/interfaces/test_user.py | Python | bsd-3-clause | 1,453 |
#!/usr/bin/env python
from __future__ import print_function
import argparse
import json
import os
import subprocess
import sys
from kubos_build import KubosBuild
import utils
this_dir = os.path.abspath(os.path.dirname(__file__))
root_dir = os.path.dirname(this_dir)
class KubosBuilder(object):
def __init__(self):
self.kb = KubosBuild()
self.modules = self.kb.modules()
self.targets = self.kb.targets()
def list_targets(self):
for target in self.kb.targets():
if 'buildTarget' in target.yotta_data:
print(target.yotta_name())
def list_modules(self):
for module in self.kb.modules():
print(module.yotta_name())
def find_modules(self, path):
path_list = path.split("/")
modules = set()
# Pop off file name for first directory
path_list.pop()
while len(path_list):
new_path = "/".join(path_list)
kubos_build = KubosBuild(kubos_dir=new_path)
for p in kubos_build.projects:
if p.type != "unknown":
modules.add(p.yotta_name())
if len(modules):
break
path_list.pop()
return modules
def list_changed_modules(self, ref):
try:
git_output = subprocess.check_output(["git", "diff", "--numstat", ref])
git_lines = [l for l in git_output.splitlines()]
file_paths = [l.split()[2] for l in git_lines]
modules = set()
for path in file_paths:
modules = modules | (self.find_modules(path))
if len(modules):
print("Modules changed:")
for m in modules:
print(m)
return 0
except subprocess.CalledProcessError:
print("Error getting changed modules")
return 1
def build(self, module_name="", target_name=""):
module = next((m for m in self.kb.modules() if m.yotta_name() == module_name), None)
target = next((t for t in self.kb.targets() if t.yotta_name() == target_name), None)
if module and target:
print('Building [module %s@%s] for [target %s] - ' % (module.yotta_name(), module.path, target_name), end="")
utils.cmd('kubos', 'target', target_name, cwd=module.path, echo=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
utils.cmd('kubos', 'clean', cwd=module.path, echo=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
ret = utils.cmd('yt', 'build', cwd=module.path, echo=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print('Result %d' % ret)
return ret
else:
if module is None:
print("Module %s was not found" % module_name)
if target is None:
print("Target %s was not found" % target_name)
return 1
def build_all_targets(self, module_name=""):
ret = 0
module = next((m for m in self.kb.modules() if m.yotta_name() == module_name), None)
if module:
for target in self.kb.build_targets():
build_ret = self.build(module.yotta_name(), target.yotta_name())
if build_ret != 0:
ret = build_ret
return ret
else:
print("Module %s was not found" % module_name)
return 1
def build_all_modules(self, target_name=""):
ret = 0
target = next((t for t in self.kb.targets() if t.yotta_name() == target_name), None)
if target:
for module in self.kb.modules():
build_ret = self.build(module.yotta_name(), target.yotta_name())
if build_ret != 0:
ret = build_ret
return ret
else:
print("Target %s was not found" % target_name)
return 1
def build_all_combinations(self):
ret = 0
for target in self.kb.targets():
for module in self.kb.modules():
build_ret = self.build(module.yotta_name(), target.yotta_name())
if build_ret != 0:
ret = build_ret
return ret
def main():
parser = argparse.ArgumentParser(
description='Builds Kubos modules')
parser.add_argument('--target', metavar='target',
help='Specifies target to build modules for')
parser.add_argument('--module', metavar='module',
help='Specifies modules to build')
parser.add_argument('--all-targets', action='store_true', default=False,
help='Builds module for all targets')
parser.add_argument('--all-modules', action='store_true', default=False,
help='Builds all modules for target')
parser.add_argument('--list-targets', action='store_true', default=False,
help='Lists all targets available for building')
parser.add_argument('--list-modules', action='store_true', default=False,
help='Lists all modules found')
parser.add_argument('--list-changed-modules', action="store", nargs="?",
dest="list_changed_modules", const="HEAD^!",
help='Lists modules that have changed. By default will diff against '
'the last commit. The git diff path desired can also be passed in')
args = parser.parse_args()
builder = KubosBuilder()
ret = 0
if args.list_targets:
ret = builder.list_targets()
elif args.list_modules:
ret = builder.list_modules()
elif args.list_changed_modules:
ret = builder.list_changed_modules(args.list_changed_modules)
elif args.target and args.module:
ret = builder.build(module_name=args.module, target_name=args.target)
elif args.module and args.all_targets:
ret = builder.build_all_targets(module_name=args.module)
elif args.target and args.all_modules:
ret = builder.build_all_modules(target_name=args.target)
elif args.all_targets and args.all_modules:
ret = builder.build_all_combinations()
else:
parser.print_help()
ret = -1
sys.exit(ret)
if __name__ == '__main__':
main()
| Psykar/kubos | tools/build.py | Python | apache-2.0 | 6,337 |
from django.conf import settings
from django.core.cache import cache
from django.http import Http404
from django.utils.encoding import smart_unicode
import redis
#Thanks to debug-toolbar for the response-replacing code.
_HTML_TYPES = ('text/html', 'application/xhtml+xml')
OUR_CODE = """
<hr> <!-- End original user content -->
<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-17997319-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
<style type="text/css">
.badge { position: fixed; display: block; bottom: 5px; height: 40px; text-indent: -9999em; border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset; -moz-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset; -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 1px 0 rgba(255, 255, 255, 0.2) inset; }
.badge.rtd { background: #257597 url(http://media.readthedocs.org/images/badge-rtd.png) top left no-repeat; border: 1px solid #282E32; width: 160px; right: 5px; }
</style>
<a href="http://readthedocs.org?fromdocs=middleware" class="badge rtd">Brought to you by Read the Docs</a>
"""
def replace_insensitive(string, target, replacement):
"""
Similar to string.replace() but is case insensitive
Code borrowed from: http://forums.devshed.com/python-programming-11/case-insensitive-string-replace-490921.html
"""
no_case = string.lower()
index = no_case.rfind(target.lower())
if index >= 0:
return string[:index] + replacement + string[index + len(target):]
else: # no results so return the original string
return string
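# Illustrative example (added sketch): the replacement is spliced in at the
# last match regardless of the tag's case:
#   replace_insensitive("<BODY>hi</BODY>", "</body>", "bye</body>")
#   ->  '<BODY>hibye</body>'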
class SubdomainMiddleware(object):
def process_request(self, request):
if settings.DEBUG:
return None
host = request.get_host()
if ':' in host:
host = host.split(':')[0]
domain_parts = host.split('.')
#Google was finding crazy www.blah.readthedocs.org domains.
if len(domain_parts) > 3:
if not settings.DEBUG:
raise Http404('Invalid hostname')
if len(domain_parts) == 3:
subdomain = domain_parts[0]
if not (subdomain.lower() == 'www') and 'readthedocs.org' in host:
request.subdomain = True
request.slug = subdomain
request.urlconf = 'core.subdomain_urls'
return None
if len(domain_parts) == 3:
subdomain = domain_parts[0]
if not (subdomain.lower() == 'www') and 'rtfd.org' in host:
request.slug = subdomain
request.urlconf = 'core.djangome_urls'
return None
if 'readthedocs.org' not in host \
and 'localhost' not in host \
and 'testserver' not in host:
request.cname = True
try:
slug = cache.get(host)
if not slug:
redis_conn = redis.Redis(**settings.REDIS)
from dns import resolver
answer = [ans for ans in resolver.query(host, 'CNAME')][0]
domain = answer.target.to_unicode()
slug = domain.split('.')[0]
cache.set(host, slug, 60*60)
#Cache the slug -> host mapping permanently.
redis_conn.sadd("rtd_slug:v1:%s" % slug, host)
request.slug = slug
request.urlconf = 'core.subdomain_urls'
except:
#Some crazy person is CNAMEing to us. 404.
if not settings.DEBUG:
raise Http404('Invalid Host Name.')
#Normal request.
return None
def process_response(self, request, response):
#Try and make this match as little as possible.
if response.status_code == 200 and '_static' not in request.path and '_images' not in request.path:
if getattr(request, 'add_badge', False):
response.content = replace_insensitive(
smart_unicode(response.content),
"</body>",
smart_unicode(OUR_CODE + "</body>"))
if response.get('Content-Length', None):
response['Content-Length'] = len(response.content)
return response
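# A minimal sketch of wiring this middleware up (hypothetical settings module;
# the exact MIDDLEWARE_CLASSES ordering depends on the deployment):
#
#   MIDDLEWARE_CLASSES = (
#       'django.middleware.common.CommonMiddleware',
#       'core.middleware.SubdomainMiddleware',
#   )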
| alex/readthedocs.org | readthedocs/core/middleware.py | Python | mit | 4,693 |
import json
from Crypto.PublicKey import RSA
from decimal import Decimal
class Util:
@staticmethod
def construct_key_from_data(rsa_data):
k = json.loads(rsa_data['whole'])
key = RSA.construct((
long(k['n']),
long(k['e']),
long(k['d']),
long(k['p']),
long(k['q']),
long(k['u'])))
return key
@staticmethod
def construct_pubkey_from_data(rsa_data):
key = RSA.construct((
long(rsa_data['n']),
long(rsa_data['e'])))
return key
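    # Illustrative (hypothetical) input shapes for the constructors above:
    #   construct_pubkey_from_data({'n': '1234567...', 'e': '65537'})
    # construct_key_from_data expects rsa_data['whole'] to be a JSON string
    # carrying the n/e/d/p/q/u components as long-convertible values.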
@staticmethod
def create_future_transaction(btc, prevtx, outputs, amount_available, receiver_address, locktime):
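        # NOTE: `locktime` is accepted but currently unused in this fragment;
        # locking appears to be handled elsewhere (assumption from this file).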
inputs = []
for tx in prevtx:
inputs.append({'txid': tx['txid'], 'vout': tx['vout']})
cash_back = amount_available
for oracle, fee in outputs.iteritems():
cash_back -= Decimal(fee)
outputs[receiver_address] = cash_back
vout = {}
for address, value in outputs.iteritems():
# My heart bleeds when I write it
vout[address] = float(value)
transaction = btc.create_raw_transaction(inputs, vout)
return transaction
| orisi/orisi | src/oracle/handlers/bounty_contract/util.py | Python | mit | 1,107 |
#!/usr/bin/python
import sys
import re
import codecs
argc = len(sys.argv)
if argc != 3:
sys.stderr.write('error: wrong number of arguments.\n')
exit(1)
input = sys.argv[1]
output = sys.argv[2]
locre = re.compile(r'^"(.*)" = "(.*)";$')
commentre = re.compile(r'^(/\*.*\*/)|(//.*)$')
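# Illustrative lines each pattern matches (hypothetical content):
#   locre:     "Cancel" = "Abbrechen";
#   commentre: /* Class = "NSButtonCell"; title = "Cancel"; */   or   // note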
inputfile = codecs.open(input, 'r', 'utf-16')
lines = inputfile.readlines()
inputfile.close()
if len(lines) > 1 or (len(lines) == 1 and len(lines[0]) > 1):
outputfile = codecs.open(output, 'w', 'utf-16')
outputfile.seek(0)
foundstrings = set()
comment = None
for line in lines:
if commentre.match(line) != None:
comment = line
else:
match = locre.match(line)
if match != None:
string = match.group(2)
key = string.encode('ascii', 'backslashreplace').replace('\\u', '\\U')
if key not in foundstrings:
foundstrings.add(key)
outputfile.write('\n')
if comment != None:
outputfile.write(comment)
line = '"' + key + '" = "' + string + '";\n'
outputfile.write(line)
comment = None
elif line != '\n':
sys.stderr.write('warning: confused about line in {0}:\n{1}\n'.format(input, line))
outputfile.close()
| JackieXie168/skim | postprocess-xib-strings.py | Python | bsd-3-clause | 1,458 |
# -*- coding: utf-8 -*-
import datetime
import functools
import logging
import markdown
import pytz
from addons.base.models import BaseNodeSettings
from bleach.callbacks import nofollow
from bleach import Cleaner
from functools import partial
from bleach.linkifier import LinkifyFilter
from django.db import models
from framework.forms.utils import sanitize
from markdown.extensions import codehilite, fenced_code, wikilinks
from osf.models import AbstractNode, NodeLog, OSFUser
from osf.models.base import BaseModel, GuidMixin, ObjectIDMixin
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils.requests import DummyRequest, get_request_and_user_id
from website import settings
from addons.wiki import utils as wiki_utils
from website.exceptions import NodeStateError
from website.util import api_v2_url
from website.files.exceptions import VersionNotFoundError
from osf.utils.requests import get_headers_from_request
from .exceptions import (
NameEmptyError,
NameInvalidError,
NameMaximumLengthError,
)
logger = logging.getLogger(__name__)
SHAREJS_HOST = 'localhost'
SHAREJS_PORT = 7007
SHAREJS_URL = '{}:{}'.format(SHAREJS_HOST, SHAREJS_PORT)
SHAREJS_DB_NAME = 'sharejs'
SHAREJS_DB_URL = 'mongodb://{}:{}/{}'.format(settings.DB_HOST, settings.DB_PORT, SHAREJS_DB_NAME)
# TODO: Change to release date for wiki change
WIKI_CHANGE_DATE = datetime.datetime.utcfromtimestamp(1423760098).replace(tzinfo=pytz.utc)
def validate_page_name(value):
value = (value or '').strip()
if not value:
        # TODO: determine if this is possible anymore, deprecate if not
raise NameEmptyError('Page name cannot be blank.')
if value.find('/') != -1:
raise NameInvalidError('Page name cannot contain forward slashes.')
if len(value) > 100:
raise NameMaximumLengthError('Page name cannot be greater than 100 characters.')
return True
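# For illustration: validate_page_name('Home') -> True, while '' raises
# NameEmptyError, 'a/b' raises NameInvalidError, and a name longer than
# 100 characters raises NameMaximumLengthError.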
def build_html_output(content, node):
return markdown.markdown(
content,
extensions=[
wikilinks.WikiLinkExtension(
configs=[
('base_url', ''),
('end_url', ''),
('build_url', functools.partial(build_wiki_url, node))
]
),
fenced_code.FencedCodeExtension(),
codehilite.CodeHiliteExtension(
[('css_class', 'highlight')]
)
]
)
def render_content(content, node):
html_output = build_html_output(content, node)
    # linkify gets called after sanitize, because we're adding rel="nofollow"
# to <a> elements - but don't want to allow them for other elements.
sanitized_content = sanitize(html_output, **settings.WIKI_WHITELIST)
return sanitized_content
def build_wiki_url(node, label, base, end):
return '/{pid}/wiki/{wname}/'.format(pid=node._id, wname=label)
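# For illustration (hypothetical ids): a node whose _id is 'abc12' and the
# label 'Home' yield '/abc12/wiki/Home/'. The `base` and `end` arguments are
# required by the WikiLinkExtension build_url signature but unused here.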
class WikiVersion(ObjectIDMixin, BaseModel):
user = models.ForeignKey('osf.OSFUser', null=True, blank=True, on_delete=models.CASCADE)
wiki_page = models.ForeignKey('WikiPage', null=True, blank=True, on_delete=models.CASCADE, related_name='versions')
content = models.TextField(default='', blank=True)
identifier = models.IntegerField(default=1)
@property
def is_current(self):
return not self.wiki_page.deleted and self.identifier == self.wiki_page.current_version_number
def html(self, node):
"""The cleaned HTML of the page"""
html_output = build_html_output(self.content, node=node)
try:
cleaner = Cleaner(
tags=settings.WIKI_WHITELIST['tags'],
attributes=settings.WIKI_WHITELIST['attributes'],
styles=settings.WIKI_WHITELIST['styles'],
filters=[partial(LinkifyFilter, callbacks=[nofollow, ])]
)
return cleaner.clean(html_output)
except TypeError:
logger.warning('Returning unlinkified content.')
return render_content(self.content, node=node)
def raw_text(self, node):
""" The raw text of the page, suitable for using in a test search"""
return sanitize(self.html(node), tags=[], strip=True)
@property
def rendered_before_update(self):
return self.created < WIKI_CHANGE_DATE
def get_draft(self, node):
"""
Return most recently edited version of wiki, whether that is the
last saved version or the most recent sharejs draft.
"""
db = wiki_utils.share_db()
sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.wiki_page.page_name)
doc_item = db['docs'].find_one({'_id': sharejs_uuid})
if doc_item:
sharejs_version = doc_item['_v']
sharejs_timestamp = doc_item['_m']['mtime']
            sharejs_timestamp /= 1000  # Convert from milliseconds to seconds
sharejs_date = datetime.datetime.utcfromtimestamp(sharejs_timestamp).replace(tzinfo=pytz.utc)
if sharejs_version > 1 and sharejs_date > self.created:
return doc_item['_data']
return self.content
def save(self, *args, **kwargs):
rv = super(WikiVersion, self).save(*args, **kwargs)
if self.wiki_page.node:
self.wiki_page.node.update_search()
self.wiki_page.modified = self.created
self.wiki_page.save()
self.spam_check()
return rv
def spam_check(self):
        # wiki_pages_current is being removed from the Node model, so trigger
        # a spam check whenever a new WikiVersion is saved.
request, user_id = get_request_and_user_id()
request_headers = {}
if not isinstance(request, DummyRequest):
request_headers = {
k: v
for k, v in get_headers_from_request(request).items()
if isinstance(v, basestring)
}
user = OSFUser.load(user_id)
return self.wiki_page.node.check_spam(user, ['wiki_pages_latest'], request_headers)
def clone_version(self, wiki_page, user):
"""Clone a node wiki page.
:param wiki_page: The wiki_page you want attached to the clone.
:return: The cloned wiki version
"""
clone = self.clone()
clone.wiki_page = wiki_page
clone.user = user
clone.save()
return clone
@property
def absolute_api_v2_url(self):
path = '/wiki_versions/{}/'.format(self._id)
return api_v2_url(path)
# used by django and DRF
def get_absolute_url(self):
return self.absolute_api_v2_url
class WikiPage(GuidMixin, BaseModel):
page_name = models.CharField(max_length=200, validators=[validate_page_name, ])
user = models.ForeignKey('osf.OSFUser', null=True, blank=True, on_delete=models.CASCADE)
node = models.ForeignKey('osf.AbstractNode', null=True, blank=True, on_delete=models.CASCADE, related_name='wikis')
deleted = NonNaiveDateTimeField(blank=True, null=True, db_index=True)
class Meta:
indexes = [
models.Index(fields=['page_name', 'node'])
]
def save(self, *args, **kwargs):
rv = super(WikiPage, self).save(*args, **kwargs)
if self.node and self.node.is_public:
self.node.update_search()
return rv
def update_active_sharejs(self, node):
"""
Update all active sharejs sessions with latest wiki content.
"""
"""
TODO: This def is meant to be used after updating wiki content via
the v2 API, once updating has been implemented. It should be removed
if not used for that purpose.
"""
sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.page_name)
contributors = [user._id for user in node.contributors]
wiki_utils.broadcast_to_sharejs('reload',
sharejs_uuid,
data=contributors)
def belongs_to_node(self, node_id):
"""Check whether the wiki is attached to the specified node."""
return self.node._id == node_id
@property
def current_version_number(self):
return self.versions.count()
@property
def url(self):
return u'{}wiki/{}/'.format(self.node.url, self.page_name)
def create_version(self, user, content):
version = WikiVersion(user=user, wiki_page=self, content=content, identifier=self.current_version_number + 1)
version.save()
return version
def get_version(self, version=None):
try:
if version:
return self.versions.get(identifier=version)
return self.versions.last()
except (WikiVersion.DoesNotExist, ValueError):
raise VersionNotFoundError(version)
def get_versions(self):
return self.versions.all().order_by('-created')
def rename(self, new_name, save=True):
self.page_name = new_name
if save:
self.save()
@property
def root_target_page(self):
"""The comment page type associated with WikiPages."""
return 'wiki'
@property
def deep_url(self):
return u'{}wiki/{}/'.format(self.node.deep_url, self.page_name)
def clone_wiki_page(self, copy, user, save=True):
"""Clone a wiki page.
        :param copy: The node to attach the cloned wiki page to
:return: The cloned wiki page
"""
new_wiki_page = self.clone()
new_wiki_page.node = copy
new_wiki_page.user = user
new_wiki_page.save()
for version in self.versions.all().order_by('created'):
new_version = version.clone_version(new_wiki_page, user)
if save:
new_version.save()
        return new_wiki_page
@classmethod
def clone_wiki_pages(cls, node, copy, user, save=True):
"""Clone wiki pages for a forked or registered project.
First clones the WikiPage, then clones all WikiPage versions.
:param node: The Node that was forked/registered
:param copy: The fork/registration
:param user: The user who forked or registered the node
:param save: Whether to save the fork/registration
:return: copy
"""
for wiki_page in node.wikis.filter(deleted__isnull=True):
wiki_page.clone_wiki_page(copy, user, save)
return copy
def to_json(self, user):
return {}
def get_extra_log_params(self, comment):
return {'wiki': {'name': self.page_name, 'url': comment.get_comment_page_url()}}
# For Comment API compatibility
@property
def target_type(self):
"""The object "type" used in the OSF v2 API."""
return 'wiki'
# used by django and DRF
def get_absolute_url(self):
return self.absolute_api_v2_url
@property
def absolute_api_v2_url(self):
path = '/wikis/{}/'.format(self._id)
return api_v2_url(path)
class NodeWikiPage(GuidMixin, BaseModel):
page_name = models.CharField(max_length=200, validators=[validate_page_name, ])
version = models.IntegerField(default=1)
date = NonNaiveDateTimeField(auto_now_add=True)
content = models.TextField(default='', blank=True)
user = models.ForeignKey('osf.OSFUser', null=True, blank=True, on_delete=models.CASCADE)
node = models.ForeignKey('osf.AbstractNode', null=True, blank=True, on_delete=models.CASCADE)
former_guid = models.CharField(null=True, blank=True, max_length=100, db_index=True)
@property
def is_current(self):
key = wiki_utils.to_mongo_key(self.page_name)
if key in self.node.wiki_pages_current:
return self.node.wiki_pages_current[key] == self._id
else:
return False
@property
def deep_url(self):
return u'{}wiki/{}/'.format(self.node.deep_url, self.page_name)
@property
def url(self):
return u'{}wiki/{}/'.format(self.node.url, self.page_name)
@property
def rendered_before_update(self):
return self.date < WIKI_CHANGE_DATE
# For Comment API compatibility
@property
def target_type(self):
"""The object "type" used in the OSF v2 API."""
return 'wiki'
@property
def root_target_page(self):
"""The comment page type associated with NodeWikiPages."""
return 'wiki'
@property
def is_deleted(self):
key = wiki_utils.to_mongo_key(self.page_name)
return key not in self.node.wiki_pages_current
@property
def absolute_api_v2_url(self):
path = '/wikis/{}/'.format(self._id)
return api_v2_url(path)
def belongs_to_node(self, node_id):
"""Check whether the wiki is attached to the specified node."""
return self.node._id == node_id
def get_extra_log_params(self, comment):
return {'wiki': {'name': self.page_name, 'url': comment.get_comment_page_url()}}
# used by django and DRF
def get_absolute_url(self):
return self.absolute_api_v2_url
def html(self, node):
"""The cleaned HTML of the page"""
html_output = build_html_output(self.content, node=node)
try:
cleaner = Cleaner(
tags=settings.WIKI_WHITELIST['tags'],
attributes=settings.WIKI_WHITELIST['attributes'],
styles=settings.WIKI_WHITELIST['styles'],
filters=[partial(LinkifyFilter, callbacks=[nofollow, ])]
)
return cleaner.clean(html_output)
except TypeError:
logger.warning('Returning unlinkified content.')
return render_content(self.content, node=node)
def raw_text(self, node):
""" The raw text of the page, suitable for using in a test search"""
return sanitize(self.html(node), tags=[], strip=True)
def get_draft(self, node):
"""
Return most recently edited version of wiki, whether that is the
last saved version or the most recent sharejs draft.
"""
db = wiki_utils.share_db()
sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.page_name)
doc_item = db['docs'].find_one({'_id': sharejs_uuid})
if doc_item:
sharejs_version = doc_item['_v']
sharejs_timestamp = doc_item['_m']['mtime']
            sharejs_timestamp /= 1000  # Convert from milliseconds to seconds
sharejs_date = datetime.datetime.utcfromtimestamp(sharejs_timestamp).replace(tzinfo=pytz.utc)
if sharejs_version > 1 and sharejs_date > self.date:
return doc_item['_data']
return self.content
def update_active_sharejs(self, node):
"""
Update all active sharejs sessions with latest wiki content.
"""
"""
TODO: This def is meant to be used after updating wiki content via
the v2 API, once updating is has been implemented. It should be removed
if not used for that purpose.
"""
sharejs_uuid = wiki_utils.get_sharejs_uuid(node, self.page_name)
contributors = [user._id for user in node.contributors]
wiki_utils.broadcast_to_sharejs('reload',
sharejs_uuid,
data=contributors)
def save(self, *args, **kwargs):
rv = super(NodeWikiPage, self).save(*args, **kwargs)
if self.node:
self.node.update_search()
return rv
def rename(self, new_name, save=True):
self.page_name = new_name
if save:
self.save()
def to_json(self):
return {}
def clone_wiki(self, node_id):
"""Clone a node wiki page.
        :param node_id: The id of the Node for the cloned wiki page
:return: The cloned wiki page
"""
node = AbstractNode.load(node_id)
if not node:
raise ValueError('Invalid node')
clone = self.clone()
clone.node = node
clone.user = self.user
clone.save()
return clone
@classmethod
def clone_wiki_versions(cls, node, copy, user, save=True):
"""Clone wiki pages for a forked or registered project.
:param node: The Node that was forked/registered
:param copy: The fork/registration
:param user: The user who forked or registered the node
:param save: Whether to save the fork/registration
:return: copy
"""
copy.wiki_pages_versions = {}
copy.wiki_pages_current = {}
for key in node.wiki_pages_versions:
copy.wiki_pages_versions[key] = []
for wiki_id in node.wiki_pages_versions[key]:
node_wiki = NodeWikiPage.load(wiki_id)
cloned_wiki = node_wiki.clone_wiki(copy._id)
copy.wiki_pages_versions[key].append(cloned_wiki._id)
if node_wiki.is_current:
copy.wiki_pages_current[key] = cloned_wiki._id
if save:
copy.save()
return copy
class NodeSettings(BaseNodeSettings):
complete = True
has_auth = True
is_publicly_editable = models.BooleanField(default=False, db_index=True)
def set_editing(self, permissions, auth=None, log=False):
"""Set the editing permissions for this node.
:param auth: All the auth information including user, API key
:param bool permissions: True = publicly editable
        :param bool log: Whether to add a NodeLog for the privacy change;
            if True, the node object is also saved
"""
node = self.owner
if permissions and not self.is_publicly_editable:
if node.is_public:
self.is_publicly_editable = True
else:
raise NodeStateError('Private components cannot be made publicly editable.')
elif not permissions and self.is_publicly_editable:
self.is_publicly_editable = False
else:
raise NodeStateError('Desired permission change is the same as current setting.')
if log:
node.add_log(
action=(NodeLog.MADE_WIKI_PUBLIC
if self.is_publicly_editable
else NodeLog.MADE_WIKI_PRIVATE),
params={
'project': node.parent_id,
'node': node._primary_key,
},
auth=auth,
save=True,
)
self.save()
def after_fork(self, node, fork, user, save=True):
"""Copy wiki settings and wiki pages to forks."""
WikiPage.clone_wiki_pages(node, fork, user, save)
return super(NodeSettings, self).after_fork(node, fork, user, save)
def after_register(self, node, registration, user, save=True):
"""Copy wiki settings and wiki pages to registrations."""
WikiPage.clone_wiki_pages(node, registration, user, save)
clone = self.clone()
clone.owner = registration
if save:
clone.save()
return clone, None
def after_set_privacy(self, node, permissions):
"""
:param Node node:
:param str permissions:
:return str: Alert message
"""
if permissions == 'private':
if self.is_publicly_editable:
self.set_editing(permissions=False, log=False)
return (
'The wiki of {name} is now only editable by write contributors.'.format(
name=node.title,
)
)
def to_json(self, user):
return {}
| binoculars/osf.io | addons/wiki/models.py | Python | apache-2.0 | 19,644 |
from django.core.context_processors import csrf
from django.contrib.auth.models import User
from identity_providers.models import Cas
from openid_provider.models import *
def getUser(user):
values = {}
values['username'] = str(user)
try:
student = Cas.objects.get(user=user)
values['student_id'] = str(student.student_id)
    except Cas.DoesNotExist:
        pass
return values
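# Illustration (hypothetical user): getUser(user) returns e.g.
# {'username': 'alice'} or, when a matching Cas row exists,
# {'username': 'alice', 'student_id': '12345'}.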
| RaduGatej/SensibleData-Platform | sensible_data_platform/accounts/manager.py | Python | mit | 383 |
# Generated by Django 3.0.7 on 2020-08-08 07:15
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('company', '0022_auto_20200613_1045'),
]
operations = [
migrations.AlterModelOptions(
name='company',
options={'ordering': ['name']},
),
]
| inventree/InvenTree | InvenTree/company/migrations/0023_auto_20200808_0715.py | Python | mit | 346 |
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for cinder.api.urlmap.py
"""
from cinder.api import urlmap
from cinder import test
class TestParseFunctions(test.TestCase):
def test_unquote_header_value_without_quotes(self):
arg = 'TestString'
result = urlmap.unquote_header_value(arg)
self.assertEqual(arg, result)
def test_unquote_header_value_with_quotes(self):
result = urlmap.unquote_header_value('"TestString"')
self.assertEqual('TestString', result)
def test_parse_list_header(self):
arg = 'token, "quoted value"'
result = urlmap.parse_list_header(arg)
self.assertEqual(['token', 'quoted value'], result)
def test_parse_options_header(self):
result = urlmap.parse_options_header('Content-Type: text/html;'
' mimetype=text/html')
self.assertEqual(('Content-Type:', {'mimetype': 'text/html'}), result)
def test_parse_options_header_without_value(self):
result = urlmap.parse_options_header(None)
self.assertEqual(('', {}), result)
class TestAccept(test.TestCase):
def test_best_match_ValueError(self):
        arg = 'text/html; q=some_invalid_value'
accept = urlmap.Accept(arg)
self.assertEqual((None, {}), accept.best_match(['text/html']))
def test_best_match(self):
arg = '*/*; q=0.7, application/json; q=0.7, text/html; q=-0.8'
accept = urlmap.Accept(arg)
self.assertEqual(('application/json', {'q': '0.7'}),
accept.best_match(['application/json',
'application/xml', 'text/html']))
def test_match_mask_one_asterisk(self):
arg = 'text/*; q=0.7'
accept = urlmap.Accept(arg)
self.assertEqual(('text/html', {'q': '0.7'}),
accept.best_match(['text/html']))
def test_match_mask_two_asterisk(self):
arg = '*/*; q=0.7'
accept = urlmap.Accept(arg)
self.assertEqual(('text/html', {'q': '0.7'}),
accept.best_match(['text/html']))
def test_match_mask_no_asterisk(self):
arg = 'application/json; q=0.7'
accept = urlmap.Accept(arg)
self.assertEqual((None, {}), accept.best_match(['text/html']))
def test_content_type_params(self):
arg = "application/xml; q=0.1, application/json; q=0.2," \
" text/html; q=0.3"
accept = urlmap.Accept(arg)
self.assertEqual({'q': '0.2'},
accept.content_type_params('application/json'))
def test_content_type_params_wrong_content_type(self):
arg = 'application/xml; q=0.1, text/html; q=0.1'
accept = urlmap.Accept(arg)
self.assertEqual({}, accept.content_type_params('application/json'))
class TestUrlMapFactory(test.TestCase):
def setUp(self):
super(TestUrlMapFactory, self).setUp()
self.global_conf = {'not_found_app': 'app_global',
'domain hoobar.com port 10 /': 'some_app_global'}
self.loader = self.mox.CreateMockAnything()
def test_not_found_app_in_local_conf(self):
local_conf = {'not_found_app': 'app_local',
'domain foobar.com port 20 /': 'some_app_local'}
self.loader.get_app('app_local', global_conf=self.global_conf).\
AndReturn('app_local_loader')
self.loader.get_app('some_app_local', global_conf=self.global_conf).\
AndReturn('some_app_loader')
self.mox.ReplayAll()
expected_urlmap = urlmap.URLMap(not_found_app='app_local_loader')
expected_urlmap['http://foobar.com:20'] = 'some_app_loader'
self.assertEqual(expected_urlmap,
urlmap.urlmap_factory(self.loader, self.global_conf,
**local_conf))
def test_not_found_app_not_in_local_conf(self):
local_conf = {'domain foobar.com port 20 /': 'some_app_local'}
self.loader.get_app('app_global', global_conf=self.global_conf).\
AndReturn('app_global_loader')
self.loader.get_app('some_app_local', global_conf=self.global_conf).\
AndReturn('some_app_returned_by_loader')
self.mox.ReplayAll()
expected_urlmap = urlmap.URLMap(not_found_app='app_global_loader')
expected_urlmap['http://foobar.com:20'] = 'some_app_returned'\
'_by_loader'
self.assertEqual(expected_urlmap,
urlmap.urlmap_factory(self.loader, self.global_conf,
**local_conf))
def test_not_found_app_is_none(self):
local_conf = {'not_found_app': None,
'domain foobar.com port 20 /': 'some_app_local'}
self.loader.get_app('some_app_local', global_conf=self.global_conf).\
AndReturn('some_app_returned_by_loader')
self.mox.ReplayAll()
expected_urlmap = urlmap.URLMap(not_found_app=None)
expected_urlmap['http://foobar.com:20'] = 'some_app_returned'\
'_by_loader'
self.assertEqual(expected_urlmap,
urlmap.urlmap_factory(self.loader, self.global_conf,
**local_conf))
class TestURLMap(test.TestCase):
def setUp(self):
super(TestURLMap, self).setUp()
self.urlmap = urlmap.URLMap()
self.input_environ = {'HTTP_ACCEPT': "application/json;"
"version=9.0", 'REQUEST_METHOD': "GET",
'CONTENT_TYPE': 'application/xml',
'SCRIPT_NAME': '/scriptname',
'PATH_INFO': "/resource.xml"}
self.environ = {'HTTP_ACCEPT': "application/json;"
"version=9.0", 'REQUEST_METHOD': "GET",
'CONTENT_TYPE': 'application/xml',
'SCRIPT_NAME': '/scriptname/app_url',
'PATH_INFO': "/resource.xml"}
def test_match_with_applications(self):
self.urlmap[('http://10.20.30.40:50', '/path/somepath')] = 'app'
self.assertEqual((None, None),
self.urlmap._match('20.30.40.50', '20',
'path/somepath'))
def test_match_without_applications(self):
self.assertEqual((None, None),
self.urlmap._match('host', 20, 'app_url/somepath'))
def test_match_path_info_equals_app_url(self):
self.urlmap[('http://20.30.40.50:60', '/app_url/somepath')] = 'app'
self.assertEqual(('app', '/app_url/somepath'),
self.urlmap._match('http://20.30.40.50', '60',
'/app_url/somepath'))
def test_match_path_info_equals_app_url_many_app(self):
self.urlmap[('http://20.30.40.50:60', '/path')] = 'app1'
self.urlmap[('http://20.30.40.50:60', '/path/somepath')] = 'app2'
self.urlmap[('http://20.30.40.50:60', '/path/somepath/elsepath')] = \
'app3'
self.assertEqual(('app3', '/path/somepath/elsepath'),
self.urlmap._match('http://20.30.40.50', '60',
'/path/somepath/elsepath'))
def test_set_script_name(self):
app = self.mox.CreateMockAnything()
start_response = self.mox.CreateMockAnything()
app.__call__(self.environ, start_response).AndReturn('value')
self.mox.ReplayAll()
wrap = self.urlmap._set_script_name(app, '/app_url')
self.assertEqual('value', wrap(self.input_environ, start_response))
def test_munge_path(self):
app = self.mox.CreateMockAnything()
start_response = self.mox.CreateMockAnything()
app.__call__(self.environ, start_response).AndReturn('value')
self.mox.ReplayAll()
wrap = self.urlmap._munge_path(app, '/app_url/resource.xml',
'/app_url')
self.assertEqual('value', wrap(self.input_environ, start_response))
def test_content_type_strategy_without_version(self):
        self.assertIsNone(
            self.urlmap._content_type_strategy('host', 20, self.environ))
def test_content_type_strategy_with_version(self):
environ = {'HTTP_ACCEPT': "application/vnd.openstack.melange+xml;"
"version=9.0", 'REQUEST_METHOD': "GET",
'PATH_INFO': "/resource.xml",
'CONTENT_TYPE': 'application/xml; version=2.0'}
self.urlmap[('http://10.20.30.40:50', '/v2.0')] = 'app'
self.mox.StubOutWithMock(self.urlmap, '_set_script_name')
self.urlmap._set_script_name('app', '/v2.0').AndReturn('value')
self.mox.ReplayAll()
self.assertEqual('value',
self.urlmap._content_type_strategy(
'http://10.20.30.40', '50', environ))
def test_path_strategy_wrong_path_info(self):
self.assertEqual((None, None, None),
self.urlmap._path_strategy('http://10.20.30.40', '50',
'/resource'))
def test_path_strategy_mime_type_only(self):
self.assertEqual(('application/xml', None, None),
self.urlmap._path_strategy('http://10.20.30.40', '50',
'/resource.xml'))
def test_path_strategy(self):
self.urlmap[('http://10.20.30.40:50', '/path/elsepath/')] = 'app'
self.mox.StubOutWithMock(self.urlmap, '_munge_path')
self.urlmap._munge_path('app', '/path/elsepath/resource.xml',
'/path/elsepath').AndReturn('value')
self.mox.ReplayAll()
self.assertEqual(
('application/xml', 'value', '/path/elsepath'),
self.urlmap._path_strategy('http://10.20.30.40', '50',
'/path/elsepath/resource.xml'))
def test_path_strategy_wrong_mime_type(self):
self.urlmap[('http://10.20.30.40:50', '/path/elsepath/')] = 'app'
self.mox.StubOutWithMock(self.urlmap, '_munge_path')
self.urlmap._munge_path('app', '/path/elsepath/resource.abc',
'/path/elsepath').AndReturn('value')
self.mox.ReplayAll()
self.assertEqual(
(None, 'value', '/path/elsepath'),
self.urlmap._path_strategy('http://10.20.30.40', '50',
'/path/elsepath/resource.abc'))
def test_accept_strategy_version_not_in_params(self):
environ = {'HTTP_ACCEPT': "application/xml; q=0.1, application/json; "
"q=0.2", 'REQUEST_METHOD': "GET",
'PATH_INFO': "/resource.xml",
'CONTENT_TYPE': 'application/xml; version=2.0'}
self.assertEqual(('application/xml', None),
self.urlmap._accept_strategy('http://10.20.30.40',
'50',
environ,
['application/xml']))
def test_accept_strategy_version(self):
environ = {'HTTP_ACCEPT': "application/xml; q=0.1; version=1.0,"
"application/json; q=0.2; version=2.0",
'REQUEST_METHOD': "GET", 'PATH_INFO': "/resource.xml",
'CONTENT_TYPE': 'application/xml; version=2.0'}
self.urlmap[('http://10.20.30.40:50', '/v1.0')] = 'app'
self.mox.StubOutWithMock(self.urlmap, '_set_script_name')
self.urlmap._set_script_name('app', '/v1.0').AndReturn('value')
self.mox.ReplayAll()
self.assertEqual(('application/xml', 'value'),
self.urlmap._accept_strategy('http://10.20.30.40',
'50',
environ,
['application/xml']))
| nikesh-mahalka/cinder | cinder/tests/unit/test_api_urlmap.py | Python | apache-2.0 | 12,846 |
#!/usr/bin/env python2
# Copyright (c) 2014-2016, NVIDIA CORPORATION. All rights reserved.
import argparse
import logging
import os
import sys
import PIL.Image
# Add path for DIGITS package
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import digits.config # noqa
from digits import utils, log # noqa
logger = logging.getLogger('digits.tools.resize_image')
def validate_output_file(filename):
if filename is None:
return True
if os.path.exists(filename):
if not os.access(filename, os.W_OK):
logger.error('cannot overwrite existing output file "%s"' % filename)
return False
output_dir = os.path.dirname(filename)
if not output_dir:
output_dir = '.'
if not os.path.exists(output_dir):
logger.error('output directory "%s" does not exist' % output_dir)
return False
if not os.access(output_dir, os.W_OK):
logger.error('you do not have write access to output directory "%s"' % output_dir)
return False
return True
def validate_input_file(filename):
if not os.path.exists(filename) or not os.path.isfile(filename):
logger.error('input file "%s" does not exist' % filename)
return False
if not os.access(filename, os.R_OK):
logger.error('you do not have read access to "%s"' % filename)
return False
return True
def validate_range(number, min_value=None, max_value=None, allow_none=False):
if number is None:
if allow_none:
return True
else:
logger.error('invalid value %s' % number)
return False
try:
float(number)
except ValueError:
logger.error('invalid value %s' % number)
return False
if min_value is not None and number < min_value:
logger.error('invalid value %s' % number)
return False
if max_value is not None and number > max_value:
logger.error('invalid value %s' % number)
return False
return True
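# For illustration: validate_range(5, min_value=1) -> True,
# validate_range(None, allow_none=True) -> True, and
# validate_range(0, min_value=1) -> False (logging an error).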
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Resize-Image tool - DIGITS')
# Positional arguments
parser.add_argument('image',
help='A filesystem path or url to the image'
)
parser.add_argument('output',
help='The location to output the image'
)
parser.add_argument('width',
type=int,
help='The new width'
)
parser.add_argument('height',
type=int,
help='The new height'
)
# Optional arguments
parser.add_argument('-c', '--channels',
type=int,
help='The new number of channels [default is to remain unchanged]'
)
parser.add_argument('-m', '--mode',
default='squash',
help='Resize mode (squash/crop/fill/half_crop) [default is squash]'
)
args = vars(parser.parse_args())
for valid in [
validate_range(args['width'], min_value=1),
validate_range(args['height'], min_value=1),
validate_range(args['channels'],
min_value=1, max_value=3, allow_none=True),
validate_output_file(args['output']),
]:
if not valid:
sys.exit(1)
# load image
image = utils.image.load_image(args['image'])
# resize image
image = utils.image.resize_image(image, args['height'], args['width'],
channels=args['channels'],
resize_mode=args['mode'],
)
image = PIL.Image.fromarray(image)
try:
image.save(args['output'])
except KeyError:
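        # PIL historically raises KeyError when it cannot determine the
        # output format from the file extension (assumption for this catch).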
logger.error('Unable to save file to "%s"' % args['output'])
sys.exit(1)
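# Example invocation (hypothetical paths):
#   python resize_image.py input.jpg resized.jpg 256 256 -c 3 -m crop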
| gheinrich/DIGITS-GAN | digits/tools/resize_image.py | Python | bsd-3-clause | 4,054 |
# Zulip's OpenAPI-based API documentation system is documented at
# https://zulip.readthedocs.io/en/latest/documentation/api.html
#
# This file contains helper functions for generating cURL examples
# based on Zulip's OpenAPI definitions, as well as test setup and
# fetching of appropriate parameter values to use when running the
# cURL examples as part of the tools/test-api test suite.
from functools import wraps
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
from django.utils.timezone import now as timezone_now
from zerver.lib.actions import (
do_add_linkifier,
do_add_reaction,
do_add_realm_playground,
do_create_user,
update_user_presence,
)
from zerver.lib.events import do_events_register
from zerver.lib.initial_password import initial_password
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.upload import upload_message_file
from zerver.lib.users import get_api_key
from zerver.models import Client, Message, UserGroup, UserPresence, get_realm, get_user
GENERATOR_FUNCTIONS: Dict[str, Callable[[], Dict[str, object]]] = {}
REGISTERED_GENERATOR_FUNCTIONS: Set[str] = set()
CALLED_GENERATOR_FUNCTIONS: Set[str] = set()
# This is a List rather than just a string in order to make it easier
# to write to it from another module.
AUTHENTICATION_LINE: List[str] = [""]
helpers = ZulipTestCase()
def openapi_param_value_generator(
endpoints: List[str],
) -> Callable[[Callable[[], Dict[str, object]]], Callable[[], Dict[str, object]]]:
"""This decorator is used to register OpenAPI param value genarator functions
with endpoints. Example usage:
@openapi_param_value_generator(["/messages/render:post"])
def ...
"""
def wrapper(generator_func: Callable[[], Dict[str, object]]) -> Callable[[], Dict[str, object]]:
@wraps(generator_func)
def _record_calls_wrapper() -> Dict[str, object]:
CALLED_GENERATOR_FUNCTIONS.add(generator_func.__name__)
return generator_func()
REGISTERED_GENERATOR_FUNCTIONS.add(generator_func.__name__)
for endpoint in endpoints:
GENERATOR_FUNCTIONS[endpoint] = _record_calls_wrapper
return _record_calls_wrapper
return wrapper
def assert_all_helper_functions_called() -> None:
"""Throws an exception if any registered helpers were not called by tests"""
if REGISTERED_GENERATOR_FUNCTIONS == CALLED_GENERATOR_FUNCTIONS:
return
uncalled_functions = str(REGISTERED_GENERATOR_FUNCTIONS - CALLED_GENERATOR_FUNCTIONS)
raise Exception(f"Registered curl API generators were not called: {uncalled_functions}")
def patch_openapi_example_values(
entry: str,
params: List[Dict[str, Any]],
request_body: Optional[Dict[str, Any]] = None,
) -> Tuple[List[Dict[str, object]], Optional[Dict[str, object]]]:
if entry not in GENERATOR_FUNCTIONS:
return params, request_body
func = GENERATOR_FUNCTIONS[entry]
realm_example_values: Dict[str, object] = func()
for param in params:
param_name = param["name"]
if param_name in realm_example_values:
if "content" in param:
param["content"]["application/json"]["example"] = realm_example_values[param_name]
else:
param["example"] = realm_example_values[param_name]
if request_body is not None:
properties = request_body["content"]["multipart/form-data"]["schema"]["properties"]
for key, property in properties.items():
if key in realm_example_values:
property["example"] = realm_example_values[key]
return params, request_body
@openapi_param_value_generator(["/fetch_api_key:post"])
def fetch_api_key() -> Dict[str, object]:
email = helpers.example_email("iago")
password = initial_password(email)
return {
"username": email,
"password": password,
}
@openapi_param_value_generator(
[
"/messages/{message_id}:get",
"/messages/{message_id}/history:get",
"/messages/{message_id}:patch",
"/messages/{message_id}:delete",
]
)
def iago_message_id() -> Dict[str, object]:
iago = helpers.example_user("iago")
helpers.subscribe(iago, "Denmark")
return {
"message_id": helpers.send_stream_message(iago, "Denmark"),
}
@openapi_param_value_generator(["/messages/{message_id}/reactions:delete"])
def add_emoji_to_message() -> Dict[str, object]:
user_profile = helpers.example_user("iago")
# The message ID here is hardcoded based on the corresponding value
# for the example message IDs we use in zulip.yaml.
message_id = 44
emoji_name = "octopus"
emoji_code = "1f419"
reaction_type = "unicode_emoji"
message = Message.objects.select_related().get(id=message_id)
do_add_reaction(user_profile, message, emoji_name, emoji_code, reaction_type)
return {}
@openapi_param_value_generator(["/messages/flags:post"])
def update_flags_message_ids() -> Dict[str, object]:
stream_name = "Venice"
helpers.subscribe(helpers.example_user("iago"), stream_name)
messages = []
for _ in range(3):
messages.append(helpers.send_stream_message(helpers.example_user("iago"), stream_name))
return {
"messages": messages,
}
@openapi_param_value_generator(["/mark_stream_as_read:post", "/users/me/{stream_id}/topics:get"])
def get_venice_stream_id() -> Dict[str, object]:
return {
"stream_id": helpers.get_stream_id("Venice"),
}
@openapi_param_value_generator(["/streams/{stream_id}:patch"])
def update_stream() -> Dict[str, object]:
stream = helpers.subscribe(helpers.example_user("iago"), "temp_stream 1")
return {
"stream_id": stream.id,
}
@openapi_param_value_generator(["/streams/{stream_id}:delete"])
def create_temp_stream_and_get_id() -> Dict[str, object]:
stream = helpers.subscribe(helpers.example_user("iago"), "temp_stream 2")
return {
"stream_id": stream.id,
}
@openapi_param_value_generator(["/mark_topic_as_read:post"])
def get_denmark_stream_id_and_topic() -> Dict[str, object]:
stream_name = "Denmark"
topic_name = "Tivoli Gardens"
helpers.subscribe(helpers.example_user("iago"), stream_name)
helpers.send_stream_message(helpers.example_user("hamlet"), stream_name, topic_name=topic_name)
return {
"stream_id": helpers.get_stream_id(stream_name),
"topic_name": topic_name,
}
@openapi_param_value_generator(["/users/me/subscriptions/properties:post"])
def update_subscription_data() -> Dict[str, object]:
profile = helpers.example_user("iago")
helpers.subscribe(profile, "Verona")
helpers.subscribe(profile, "social")
return {
"subscription_data": [
{"stream_id": helpers.get_stream_id("Verona"), "property": "pin_to_top", "value": True},
{"stream_id": helpers.get_stream_id("social"), "property": "color", "value": "#f00f00"},
],
}
@openapi_param_value_generator(["/users/me/subscriptions:delete"])
def delete_subscription_data() -> Dict[str, object]:
iago = helpers.example_user("iago")
zoe = helpers.example_user("ZOE")
helpers.subscribe(iago, "Verona")
helpers.subscribe(iago, "social")
helpers.subscribe(zoe, "Verona")
helpers.subscribe(zoe, "social")
return {}
@openapi_param_value_generator(["/events:get"])
def get_events() -> Dict[str, object]:
profile = helpers.example_user("iago")
helpers.subscribe(profile, "Verona")
client = Client.objects.create(name="curl-test-client-1")
response = do_events_register(profile, client, event_types=["message", "realm_emoji"])
helpers.send_stream_message(helpers.example_user("hamlet"), "Verona")
return {
"queue_id": response["queue_id"],
"last_event_id": response["last_event_id"],
}
@openapi_param_value_generator(["/events:delete"])
def delete_event_queue() -> Dict[str, object]:
profile = helpers.example_user("iago")
client = Client.objects.create(name="curl-test-client-2")
response = do_events_register(profile, client, event_types=["message"])
return {
"queue_id": response["queue_id"],
"last_event_id": response["last_event_id"],
}
@openapi_param_value_generator(["/users/{user_id_or_email}/presence:get"])
def get_user_presence() -> Dict[str, object]:
iago = helpers.example_user("iago")
client = Client.objects.create(name="curl-test-client-3")
update_user_presence(iago, client, timezone_now(), UserPresence.ACTIVE, False)
return {}
@openapi_param_value_generator(["/users:post"])
def create_user() -> Dict[str, object]:
return {
"email": helpers.nonreg_email("test"),
}
@openapi_param_value_generator(["/user_groups/create:post"])
def create_user_group_data() -> Dict[str, object]:
return {
"members": [helpers.example_user("hamlet").id, helpers.example_user("othello").id],
}
@openapi_param_value_generator(
["/user_groups/{user_group_id}:patch", "/user_groups/{user_group_id}:delete"]
)
def get_temp_user_group_id() -> Dict[str, object]:
user_group, _ = UserGroup.objects.get_or_create(name="temp", realm=get_realm("zulip"))
return {
"user_group_id": user_group.id,
}
@openapi_param_value_generator(["/realm/filters/{filter_id}:delete"])
def remove_realm_filters() -> Dict[str, object]:
filter_id = do_add_linkifier(
get_realm("zulip"), "#(?P<id>[0-9]{2,8})", "https://github.com/zulip/zulip/pull/%(id)s"
)
return {
"filter_id": filter_id,
}
@openapi_param_value_generator(["/realm/emoji/{emoji_name}:post", "/user_uploads:post"])
def upload_custom_emoji() -> Dict[str, object]:
return {
"filename": "zerver/tests/images/animated_img.gif",
}
@openapi_param_value_generator(["/realm/playgrounds:post"])
def add_realm_playground() -> Dict[str, object]:
return {
"name": "Python2 playground",
"pygments_language": "Python2",
"url_prefix": "https://python2.example.com",
}
@openapi_param_value_generator(["/realm/playgrounds/{playground_id}:delete"])
def remove_realm_playground() -> Dict[str, object]:
playground_info = dict(
name="Python playground",
pygments_language="Python",
url_prefix="https://python.example.com",
)
playground_id = do_add_realm_playground(get_realm("zulip"), **playground_info)
return {
"playground_id": playground_id,
}
@openapi_param_value_generator(["/users/{user_id}:delete"])
def deactivate_user() -> Dict[str, object]:
user_profile = do_create_user(
email="[email protected]",
password=None,
full_name="test_user",
realm=get_realm("zulip"),
acting_user=None,
)
return {"user_id": user_profile.id}
@openapi_param_value_generator(["/users/me:delete"])
def deactivate_own_user() -> Dict[str, object]:
test_user_email = "[email protected]"
deactivate_test_user = do_create_user(
test_user_email,
"secret",
get_realm("zulip"),
"Mr. Delete",
role=200,
acting_user=None,
)
realm = get_realm("zulip")
test_user = get_user(test_user_email, realm)
test_user_api_key = get_api_key(test_user)
# change authentication line to allow test_client to delete itself.
AUTHENTICATION_LINE[0] = f"{deactivate_test_user.email}:{test_user_api_key}"
return {}
@openapi_param_value_generator(["/attachments/{attachment_id}:delete"])
def remove_attachment() -> Dict[str, object]:
user_profile = helpers.example_user("iago")
url = upload_message_file("dummy.txt", len(b"zulip!"), "text/plain", b"zulip!", user_profile)
attachment_id = url.replace("/user_uploads/", "").split("/")[0]
return {"attachment_id": attachment_id}
| eeshangarg/zulip | zerver/openapi/curl_param_value_generators.py | Python | apache-2.0 | 11,845 |
from base64 import b64decode, b64encode
from collections import Container, Mapping, OrderedDict, Sequence
import math
from sh import chmod, Command, mkdir, tar
import six
from ..decorators import signature
from ..err import Err
from .core import onepath, Source, twopaths
class Inline(Source):
@onepath
def cache(self, cache):
body = cache.join('data')
with open(body, 'w') as h:
h.write(self.data)
class InlineText(Inline):
@signature(str)
def __init__(self, text):
if not stringsafe(text):
raise ValueError('Please use InlineBinary for non-ASCII text.')
self.text = text
@twopaths
def place(self, cache, path):
mkdir('-p', path.dirname)
with open(str(path), 'w') as h:
h.write(self.data)
@onepath
def run(self, cache, args=[]):
f = cache.join('data')
self.place(cache, f)
chmod('a+rx', str(f))
cmd = Command(str(f))
cmd(*args)
@property
def data(self):
return six.b(self.text.strip() + '\n')
def externalize(self):
return dict(text=self.text)
def __repr__(self):
return '%s(%r)' % (type(self).__name__, clip(self.text[:20]))
class InlineBinary(Inline):
@signature(bytes)
def __init__(self, data):
self.data = data
@classmethod
def base64(cls, text):
return cls(b64decode(text))
@twopaths
def place(self, cache, path):
mkdir('-p', path.dirname)
with open(str(path), 'w') as h:
h.write(self.data)
@onepath
def run(self, cache, args=[]):
f = cache.join('data')
self.place(cache, f)
chmod('a+rx', str(f))
cmd = Command(str(f))
cmd(*args)
def externalize(self):
lines = break_up_base64(b64encode(self.data))
para = '\n'.join(b.decode() for b in lines)
return dict(base64=para)
def __repr__(self):
return '%s(%r)' % (type(self).__name__,
clip(self.data[:20], ellipsis=six.b('...')))
class InlineTarGZ(InlineBinary):
@onepath
def run(self, cache, args=[]):
raise NoExecutingInlineTars()
@twopaths
def place(self, cache, path):
mkdir('-p', path)
tar('-xz', '-C', str(path), _in=self.data)
def externalize(self):
para = super(InlineTarGZ, self).externalize()['base64']
return dict(tgz64=para)
class InlineJar(InlineBinary):
@onepath
def run(self, cache, args=[]):
jar = cache.join('data.jar')
self.place(cache, jar)
cmd = Command('java')
cmd('-jar', str(jar), *args)
def externalize(self):
para = super(InlineJar, self).externalize()['base64']
return dict(jar64=para)
class InlineCollection(Inline):
@signature((Container, OrderedDict))
def __init__(self, collection):
self.collection = collection
@twopaths
def place(self, _cache, path):
InlineCollection.unpack_collection(path, self.collection)
@onepath
def run(self, _cache, _args=[]):
raise NoExecutingCollections('Collections can not be executed.')
@staticmethod
@onepath
def unpack_pairs(under, pairs):
for path, data in pairs:
full = under.join(path)
            text_like = isinstance(data, (bytes, six.text_type))
            if isinstance(data, Container) and not text_like:
InlineCollection.unpack_collection(full, data)
else:
mkdir('-p', full.dirname)
# TODO: rm -rf, check links, &c
with open(str(full), 'w') as h:
                    if isinstance(data, bytes):
                        h.write(data)
                    elif hasattr(data, 'read'):
                        h.write(data.read())
                    else:
                        h.write(str(data).strip() + '\n')
@staticmethod
def unpack_collection(under, collection):
pairs = None
        if isinstance(collection, Mapping):
            pairs = collection.items()
        elif isinstance(collection, Sequence):
            fmt = '%0' + str(int(math.ceil(math.log(len(collection), 10)))) + 'd'
pairs = ((fmt % i, data) for i, data in enumerate(collection))
if pairs is None:
raise UnhandledCollection('Collection type %s is not handled.',
type(collection).__name__)
InlineCollection.unpack_pairs(under, pairs)
class UnhandledCollection(Err):
pass
class NoExecutingCollections(Err):
pass
class NoExecutingInlineTars(Err):
pass
def clip(something, ellipsis='...', n=20):
return something if len(something) <= n else something[:n] + ellipsis
def break_up_base64(data, n=64):
while len(data) > 0:
yield data[:n]
data = data[n:]
def stringsafe(data):
text_code_points = (set([0x07, 0x08, 0x09, 0x0a, 0x0c, 0x0d, 0x1b]) |
set(range(0x20, 0x100)) - set([0x7f]))
data = data if isinstance(data, six.binary_type) else six.b(data)
return len(data.translate(None, bytearray(text_code_points))) == 0
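# A minimal, hypothetical self-check of the pure helpers above; values are
# illustrative only and assume this module's own imports are available.
if __name__ == '__main__':
    assert stringsafe('plain ascii text\n')
    assert not stringsafe(six.b('\x00\x01') + six.b('binary'))
    chunks = list(break_up_base64(b64encode(six.b('some payload bytes')), n=8))
    assert all(len(c) <= 8 for c in chunks)
    print('inline helpers: ok')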
| drcloud/arx | arx/sources/inline.py | Python | mit | 5,118 |
#!/usr/bin/python
#
# Copyright Friday Film Club. All Rights Reserved.
"""Usersearch unit tests."""
__author__ = '[email protected] (Adam McGrath)'
import unittest
from google.appengine.api import search
from google.appengine.ext import ndb
import base
import helpers
import usersearch
class TasksTestCase(base.TestCase):
def testTokenise(self):
tests = [
('foo', ['f', 'fo', 'foo', 'o', 'oo']),
('b a r', ['a', 'b', 'r']),
('ba z', ['a', 'b', 'ba', 'z']),
]
for test in tests:
self.assertEqual(sorted(usersearch.tokenize(test[0])), test[1])
def testIndexUsers(self):
users = [
helpers.user(id='foo', username='foo'),
helpers.user(id='bar', username='bar'),
]
ndb.put_multi(users)
usersearch.index_users(users)
index = search.Index(name='users')
self.assertIsNotNone(index.get('foo'))
self.assertIsNotNone(index.get('bar'))
usersearch.remove_users([u.key for u in users])
self.assertIsNone(index.get('foo'))
self.assertIsNone(index.get('bar'))
if __name__ == '__main__':
unittest.main()
| adamjmcgrath/fridayfilmclub | src/tests/test_usersearch.py | Python | mpl-2.0 | 1,099 |
# -*- coding: utf-8 -*-
from __future__ import division
'''Test for checking variation of initial prestress force along a
post-tensioned member.
Data and rough calculations are taken from
Example 4.3 in topic 4 of the course "Prestressed Concrete Design"
(SAB 4323) by Baderul Hisham Ahmad,
ocw.utm.my
Problem statement:
Determine the initial prestress force distribution along the beam
if the anchorage draw-in is 5 mm. Given the following:
• Span = 20 m, μ = 0.25 & K = 17 × 10⁻⁴ per metre
• fpi = 1239 N/mm²; Aps = 2850 mm²
• e at both ends = 0
• e at mid-span = 558 mm
• Es = 195 kN/mm²
'''
__author__= "Ana Ortega (AO_O)"
__copyright__= "Copyright 2017, AO_O"
__license__= "GPL"
__version__= "3.0"
__email__= "[email protected]"
import numpy as np
import math
from materials.prestressing import prestressed_concrete as presconc
from model.geometry import geom_utils
#Geometry
lBeam=20 #beam span [m]
#Parabola
eEnds=0 #eccentricity of cables at both ends of the beam
eMidspan=-0.558 #eccentricity of cables at midspan [m]
angl_Parab_XZ=math.pi/4 #angle between the vertical plane that contains the
#parabola and the plane XZ
#Material
Ep=195e9 #elastic modulus of prestressing steel [Pa]
#Prestressing process
mu=0.25 #coefficient of friction between the cables and their sheating
k=0.0017 #wobble coefficient per meter length of cable [1/m]
sigmap0max=1239e6 #Initial stress of cable [Pa]
Aps=2850e-6 #Area of cable [m2]
# Interpolation
n_points_rough=5 #number of points provided to the interpolation algorithm
n_points_fine=101 #number of points interpolated
#Anchorage slip
deltaL=5e-3 #anchorage draw-in (provided by manufacturer) [m]
#Rough results from direct calculation (formula):
lp_anch_lhe=419.3 #loss of prestress force at left-hand end anchorage [kN]
fl_frc=15.82 #loss of prestress due to friction [kN/m]
P_le=3111.9 #prestress force at left end [kN]
P_ms=3270.1 #prestress force at midspan [kN]
P_re=3214.8 #prestress force at right end [kN]
# XC model
#Tendon [m] definition, layout and friction losses
a,b,c=geom_utils.fit_parabola(x=np.array([0,lBeam/2.0,lBeam]), y=np.array([eEnds,eMidspan,eEnds]))
x_parab_rough,y_parab_rough,z_parab_rough=geom_utils.eq_points_parabola(0,lBeam,n_points_rough,a,b,c,angl_Parab_XZ)
tendon=presconc.PrestressTendon([])
tendon.roughCoordMtr=np.array([x_parab_rough,y_parab_rough,z_parab_rough])
#Interpolated 3D spline
tendon.pntsInterpTendon(n_points_fine,smoothness=1,kgrade=3)
# Losses of prestressing due to friction
lssFrict=tendon.getLossFriction(coefFric=mu,k=k,sigmaP0_extr1=sigmap0max,sigmaP0_extr2=0.0)
# Losses of prestressing due to anchorage slip (loss due to friction must be
# previously calculated
lssAnch=tendon.getLossAnchor(Ep=Ep,anc_slip_extr1=deltaL,anc_slip_extr2=0.0)
Laffected=tendon.projXYcoordZeroAnchLoss[0] # effective length of tendon
#affected by the anchorage slip in extremity 1 [m]
# Results
lssAnch_e1=lssAnch[0] #prestress loss due to anchorage draw-in extremity 1
lssAnch_md=lssAnch[int(len(lssAnch)/2)] #prestress loss due to anchorage draw-in midspan
lssAnch_e2=lssAnch[-1] #prestress loss due to anchorage draw-in extremity 2
lssFrict_e1=lssFrict[0] #prestress loss due to friction extremity 1
lssFrict_md=lssFrict[int(len(lssFrict)/2)] #prestress loss due to friction midspan
lssFrict_e2=lssFrict[-1] #prestress loss due to friction extremity 2
P_extr1=(sigmap0max-lssAnch_e1-lssFrict_e1)*Aps*1e-3
P_midspan=(sigmap0max-lssAnch_md-lssFrict_md)*Aps*1e-3
P_extr2=(sigmap0max-lssAnch_e2-lssFrict_e2)*Aps*1e-3
ratio1=abs(P_extr1-P_le)/P_le
ratio2=abs(P_midspan-P_ms)/P_ms
ratio3=abs(P_extr2-P_re)/P_re
import os
from miscUtils import LogMessages as lmsg
fname= os.path.basename(__file__)
if (ratio1<5.e-3 and ratio2<5.e-4 and ratio3<5.e-3):
print "test ",fname,": ok."
else:
lmsg.error(fname+' ERROR.')
| lcpt/xc | verif/tests/materials/prestressing/test_short_term_loss_prestress_01.py | Python | gpl-3.0 | 3,985 |
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404
from django.urls import reverse
from spirit.core.utils.http import safe_redirect
from spirit.core.utils.views import is_post, post_data, is_ajax
from spirit.core.utils import json_response
from spirit.comment.models import Comment
from .models import CommentLike
from .forms import LikeForm
@login_required
def create(request, comment_id):
comment = get_object_or_404(
Comment.objects.exclude(user=request.user),
pk=comment_id)
form = LikeForm(
user=request.user,
comment=comment,
data=post_data(request))
if is_post(request) and form.is_valid():
like = form.save()
like.comment.increase_likes_count()
if is_ajax(request):
return json_response({'url_delete': like.get_delete_url()})
return safe_redirect(request, 'next', comment.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/comment/like/create.html',
context={
'form': form,
'comment': comment})
@login_required
def delete(request, pk):
like = get_object_or_404(CommentLike, pk=pk, user=request.user)
if is_post(request):
like.delete()
like.comment.decrease_likes_count()
if is_ajax(request):
url = reverse(
'spirit:comment:like:create',
kwargs={'comment_id': like.comment.pk})
return json_response({'url_create': url, })
return safe_redirect(
request, 'next', like.comment.get_absolute_url(), method='POST')
return render(
request=request,
template_name='spirit/comment/like/delete.html',
context={'like': like})
| nitely/Spirit | spirit/comment/like/views.py | Python | mit | 1,843 |
'''
Performs pairwise local sequence alignment given a list of
user-provided input sequences as a FASTA file.
'''
from parameter import PairwiseAlignmentArgumentValidator, PairwiseAlignmentCommandParser, InputWrapperState
from pairwise import PairwiseDriver
from random import shuffle
from score_converter import ScoreConversionDriver
import os
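# Reset this process's CPU affinity to the first 8 cores; some numpy/BLAS
# builds pin the process to a single core on import, which would serialize
# the multiprocessing done by the pairwise driver (rationale assumed).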
os.system("taskset -p 0xff %d" % os.getpid())
version = 1.0
if __name__ == '__main__':
try:
args = PairwiseAlignmentCommandParser().parse_args()
PairwiseAlignmentArgumentValidator(args) # test all arguments are correct
print('spaghetti - v.' + str(version) + '\n=================')
input_state = InputWrapperState(args)
input_state.assign_matrix() # parse in-built or custom matrix
queries = input_state.parse_fasta(input_state.fname) # next, parse fasta file
if input_state.fname2 is None:
targets = queries
else:
targets = input_state.parse_fasta(input_state.fname2)
if input_state.get_args()['mode'] == 'align':
driver = PairwiseDriver(targets, queries, input_state)
driver.start() # start only pairwise alignment
elif input_state.get_args()['mode'] == 'calculate_percharacter':
if input_state.get_args()['rawscore_file'] is None:
raise IOError('While using mode "calculate_percharacter", a rawscore_file must be provided.')
driver = ScoreConversionDriver(input_state,queries,targets)
driver.start()
except (IOError, KeyboardInterrupt, IndexError) as e:
print(str(e)+'\n')
| tgillet1/PASTA | spaghetti.py | Python | mit | 1,632 |
from .api import (process_pc_neighborhood,
process_pc_pathsbetween,
process_pc_pathsfromto,
process_owl,
process_model)
| pvtodorov/indra | indra/sources/biopax/__init__.py | Python | bsd-2-clause | 192 |
from formula.Formula import EMA
# MACD
class DATA():
def __init__(self, diff, dea):
self.Set(diff, dea)
def Set(self, diff, dea):
self.diff = diff;
self.dea = dea;
self.macd = 2 * (self.diff - self.dea);
def __str__(self):
return 'diff={0},dea={1},macd={2}'.format(self.diff, self.dea, self.macd);
class MACD():
def __init__(self, short=12, long=26, diff=9):
self.EMAShort = [];
self.EMALong = [];
self.DEA = [];
self.DIFF = [];
self.short = short;
self.diff = diff;
self.long = long;
def Input(self, klines):
prices = klines.prices;
EMA(klines.prices, self.EMAShort, self.short);
EMA(klines.prices, self.EMALong, self.long);
ld = len(self.DIFF);
lr = len(self.EMAShort);
for idx in range(ld, lr):
self.DIFF.append(self.EMAShort[idx] - self.EMALong[idx]);
EMA(self.DIFF, self.DEA, self.diff);
def Get(self, index):
return DATA(self.DIFF[index], self.DEA[index])
def __str__(self):
        s = '';
        l = len(self.DIFF);
        for k in range(0, l):
            s = s + self.Get(k).__str__() + '\n';
        return s;
def __len__(self):
        return len(self.DIFF);
| WaitGodot/peatio-client-python | formula/MACD.py | Python | cc0-1.0 | 1,349 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import messages
from horizon import exceptions
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class AddInterface(forms.SelfHandlingForm):
subnet_id = forms.ChoiceField(label=_("Subnet"), required=False)
router_name = forms.CharField(label=_("Router Name"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
router_id = forms.CharField(label=_("Router ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
failure_url = 'horizon:project:routers:detail'
def __init__(self, request, *args, **kwargs):
super(AddInterface, self).__init__(request, *args, **kwargs)
c = self.populate_subnet_id_choices(request)
self.fields['subnet_id'].choices = c
def populate_subnet_id_choices(self, request):
tenant_id = self.request.user.tenant_id
networks = []
try:
networks = api.quantum.network_list_for_tenant(request, tenant_id)
except Exception as e:
msg = _('Failed to get network list %s') % e.message
LOG.info(msg)
messages.error(request, msg)
redirect = reverse(self.failure_url,
args=[request.REQUEST['router_id']])
exceptions.handle(request, msg, redirect=redirect)
return
choices = []
for n in networks:
net_name = n.name + ': ' if n.name else ''
choices += [(subnet.id,
'%s%s (%s)' % (net_name, subnet.cidr,
subnet.name or subnet.id))
for subnet in n['subnets']]
if choices:
choices.insert(0, ("", _("Select Subnet")))
else:
choices.insert(0, ("", _("No subnets available.")))
return choices
def handle(self, request, data):
try:
api.quantum.router_add_interface(request,
data['router_id'],
subnet_id=data['subnet_id'])
msg = _('Interface added')
LOG.debug(msg)
messages.success(request, msg)
return True
except Exception as e:
msg = _('Failed to add_interface %s') % e.message
LOG.info(msg)
messages.error(request, msg)
redirect = reverse(self.failure_url, args=[data['router_id']])
exceptions.handle(request, msg, redirect=redirect)
class SetGatewayForm(forms.SelfHandlingForm):
network_id = forms.ChoiceField(label=_("External Network"), required=False)
router_name = forms.CharField(label=_("Router Name"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
router_id = forms.CharField(label=_("Router ID"),
widget=forms.TextInput(
attrs={'readonly': 'readonly'}))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(SetGatewayForm, self).__init__(request, *args, **kwargs)
c = self.populate_network_id_choices(request)
self.fields['network_id'].choices = c
def populate_network_id_choices(self, request):
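        # a router gateway may only attach to an external network, so the
        # listing is filtered on the router:external flag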
search_opts = {'router:external': True}
try:
networks = api.quantum.network_list(request, **search_opts)
except Exception as e:
msg = _('Failed to get network list %s') % e.message
LOG.info(msg)
messages.error(request, msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return
choices = [(network.id, network.name or network.id)
for network in networks]
if choices:
choices.insert(0, ("", _("Select network")))
else:
choices.insert(0, ("", _("No networks available.")))
return choices
def handle(self, request, data):
try:
api.quantum.router_add_gateway(request,
data['router_id'],
data['network_id'])
            msg = _('Gateway interface added')
LOG.debug(msg)
messages.success(request, msg)
return True
except Exception as e:
msg = _('Failed to set gateway %s') % e.message
LOG.info(msg)
messages.error(request, msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
| trunglq7/horizon | openstack_dashboard/dashboards/project/routers/ports/forms.py | Python | apache-2.0 | 5,612 |
import logging
from horoscope_parser.hs_parser import (
DailyCommonHoroscopeReceiver, DailyCommonHoroscopeParser
)
from horoscope_parser.hs_saver import DailyCommonHoroscopeDBSaver
from .constants import ANIMALS_DICT
logger = logging.getLogger(__name__)
class DailyCommonHoroscopeCollectorManager(object):
def collect_horoscopes(self):
for animal in ANIMALS_DICT:
collector = DailyCommonHoroscopeCollector(animal)
collector.collect()
class DailyCommonHoroscopeCollector(object):
def __init__(self, animal):
self._animal = animal
def collect(self):
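        # three-stage pipeline: fetch the horoscope XML, parse it into
        # structured data, then persist the result to the database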
logger.info("Start receiving horoscopes for {}...".format(self._animal))
receiver = DailyCommonHoroscopeReceiver(self._animal)
horoscope_xml = receiver.get_horoscope()
logger.info("Done receiving horoscopes for {}...".format(self._animal))
logger.info("Start parsing...")
parser = DailyCommonHoroscopeParser(horoscope_xml)
horoscope_data = parser.get_parsed_horoscope()
logger.info("Finished parsing...")
logger.info("Saving to database...")
db_saver = DailyCommonHoroscopeDBSaver(horoscope_data)
db_saver.save()
logger.info("Finished saving to database...")
logger.info("Finished horoscopes for {}...".format(self._animal))
| adiletmaratov/horoscope-bot | src/horoscope_parser/clients.py | Python | mit | 1,341 |
"""
Current driven domain-wall motion with constant current and spin accumulation.
"""
# Copyright (C) 2011-2015 Claas Abert
#
# This file is part of magnum.fe.
#
# magnum.fe is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# magnum.fe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with magnum.fe. If not, see <http://www.gnu.org/licenses/>.
#
# Last modified by Claas Abert, 2015-02-16
from magnumfe import *
#######################################
#### DEFINE MESH, STATE AND MATERIAL
#######################################
mesh = BoxMesh(-600.0/2, -100.0/2, -10.0/2, 600.0/2, 100.0/2, 10.0/2, 120, 20, 1)
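# material parameters for the coupled LLG/spin-diffusion model; lambda_sf
# is the spin-flip relaxation length and lambda_j the length scale of the
# exchange coupling between spin accumulation and magnetization (naming as
# understood from the magnum.fe spin-diffusion formulation)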
state = State(mesh, scale = 1e-9,
material = Material(
alpha = 0.1,
ms = 8e5,
Aex = 1.3e-11,
D0 = 1e-3,
beta = 0.9,
beta_prime = 0.8,
lambda_sf = 10e-9,
lambda_j = 4e-9,
c = 3.125e-3
),
m = Expression(('1.0 - 2*(x[0] < 0.0)', 'x[0] > -10.0 && x[0] < 10.0', '0.0')),
s = Constant((0.0, 0.0, 0.0)),
j = Constant((0.0, 0.0, 0.0))
)
# normalize since initial configuration is not normalized
state.m.normalize()
# setup integrators
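# effective-field contributions: exchange, demagnetization ("FK" here
# selects what appears to be the Fredkin-Koehler hybrid FEM/BEM method)
# and the spin torque exerted by the spin accumulation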
llg = LLGAlougesProject([
ExchangeField(),
DemagField("FK"),
SpinTorque()
])
spindiff = SpinDiffusion()
# relax the initial domain-wall profile for 200 ps (200 steps of 1 ps)
# before applying current
for j in range(200): state.step(llg, 1e-12)
# apply a constant current density (3e12, in SI units A/m^2) along x
state.j = Constant((3e12, 0, 0))
state.t = 0.0
# prepare log files
mfile = File("data/m.pvd")
sfile = File("data/s.pvd")
for j in range(1000):
# save fields every 10th step
if j % 10 == 0:
mfile << (state.m, state.t)
sfile << (state.s, state.t)
# calculate next step
state.step([llg, spindiff], 1e-12)
| micromagnetics/magnum.fe | examples/current_wall_motion/run.py | Python | lgpl-3.0 | 2,142 |
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import greenthread
import mock
import uuid
from nova.compute import power_state
from nova.compute import task_states
from nova import context
from nova import exception
from nova import objects
from nova.objects import fields
from nova.pci import manager as pci_manager
from nova import test
from nova.tests.unit import fake_flavor
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.xenapi import stubs
from nova.virt import fake
from nova.virt.xenapi import agent as xenapi_agent
from nova.virt.xenapi.client import session as xenapi_session
from nova.virt.xenapi import fake as xenapi_fake
from nova.virt.xenapi import vm_utils
from nova.virt.xenapi import vmops
from nova.virt.xenapi import volume_utils
from nova.virt.xenapi import volumeops
class VMOpsTestBase(stubs.XenAPITestBaseNoDB):
def setUp(self):
super(VMOpsTestBase, self).setUp()
self._setup_mock_vmops()
self.vms = []
def _setup_mock_vmops(self, product_brand=None, product_version=None):
stubs.stubout_session(self.stubs, xenapi_fake.SessionBase)
self._session = xenapi_session.XenAPISession('test_url', 'root',
'test_pass')
self.vmops = vmops.VMOps(self._session, fake.FakeVirtAPI())
def create_vm(self, name, state="Running"):
vm_ref = xenapi_fake.create_vm(name, state)
self.vms.append(vm_ref)
vm = xenapi_fake.get_record("VM", vm_ref)
return vm, vm_ref
def tearDown(self):
super(VMOpsTestBase, self).tearDown()
for vm in self.vms:
xenapi_fake.destroy_vm(vm)
class VMOpsTestCase(VMOpsTestBase):
def setUp(self):
super(VMOpsTestCase, self).setUp()
self._setup_mock_vmops()
self.context = context.RequestContext('user', 'project')
self.instance = fake_instance.fake_instance_obj(self.context)
def _setup_mock_vmops(self, product_brand=None, product_version=None):
self._session = self._get_mock_session(product_brand, product_version)
self._vmops = vmops.VMOps(self._session, fake.FakeVirtAPI())
def _get_mock_session(self, product_brand, product_version):
class Mock(object):
pass
mock_session = Mock()
mock_session.product_brand = product_brand
mock_session.product_version = product_version
return mock_session
def _test_finish_revert_migration_after_crash(self, backup_made, new_made,
vm_shutdown=True):
instance = {'name': 'foo',
'task_state': task_states.RESIZE_MIGRATING}
context = 'fake_context'
self.mox.StubOutWithMock(vm_utils, 'lookup')
self.mox.StubOutWithMock(self._vmops, '_destroy')
self.mox.StubOutWithMock(vm_utils, 'set_vm_name_label')
self.mox.StubOutWithMock(self._vmops, '_attach_mapped_block_devices')
self.mox.StubOutWithMock(self._vmops, '_start')
self.mox.StubOutWithMock(vm_utils, 'is_vm_shutdown')
vm_utils.lookup(self._session, 'foo-orig').AndReturn(
backup_made and 'foo' or None)
vm_utils.lookup(self._session, 'foo').AndReturn(
(not backup_made or new_made) and 'foo' or None)
if backup_made:
if new_made:
self._vmops._destroy(instance, 'foo')
vm_utils.set_vm_name_label(self._session, 'foo', 'foo')
self._vmops._attach_mapped_block_devices(instance, [])
vm_utils.is_vm_shutdown(self._session, 'foo').AndReturn(vm_shutdown)
if vm_shutdown:
self._vmops._start(instance, 'foo')
self.mox.ReplayAll()
self._vmops.finish_revert_migration(context, instance, [])
def test_finish_revert_migration_after_crash(self):
self._test_finish_revert_migration_after_crash(True, True)
def test_finish_revert_migration_after_crash_before_new(self):
self._test_finish_revert_migration_after_crash(True, False)
def test_finish_revert_migration_after_crash_before_backup(self):
self._test_finish_revert_migration_after_crash(False, False)
def test_xsm_sr_check_relaxed_cached(self):
self.make_plugin_call_count = 0
def fake_make_plugin_call(plugin, method, **args):
self.make_plugin_call_count = self.make_plugin_call_count + 1
return "true"
self.stubs.Set(self._vmops, "_make_plugin_call",
fake_make_plugin_call)
self.assertTrue(self._vmops._is_xsm_sr_check_relaxed())
self.assertTrue(self._vmops._is_xsm_sr_check_relaxed())
self.assertEqual(self.make_plugin_call_count, 1)
def test_get_vm_opaque_ref_raises_instance_not_found(self):
instance = {"name": "dummy"}
self.mox.StubOutWithMock(vm_utils, 'lookup')
vm_utils.lookup(self._session, instance['name'], False).AndReturn(None)
self.mox.ReplayAll()
self.assertRaises(exception.InstanceNotFound,
self._vmops._get_vm_opaque_ref, instance)
@mock.patch.object(vm_utils, 'destroy_vm')
@mock.patch.object(vm_utils, 'clean_shutdown_vm')
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
def test_clean_shutdown_no_bdm_on_destroy(self, hard_shutdown_vm,
clean_shutdown_vm, destroy_vm):
vm_ref = 'vm_ref'
self._vmops._destroy(self.instance, vm_ref, destroy_disks=False)
hard_shutdown_vm.assert_called_once_with(self._vmops._session,
self.instance, vm_ref)
self.assertEqual(0, clean_shutdown_vm.call_count)
@mock.patch.object(vm_utils, 'destroy_vm')
@mock.patch.object(vm_utils, 'clean_shutdown_vm')
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
def test_clean_shutdown_with_bdm_on_destroy(self, hard_shutdown_vm,
clean_shutdown_vm, destroy_vm):
vm_ref = 'vm_ref'
block_device_info = {'block_device_mapping': ['fake']}
self._vmops._destroy(self.instance, vm_ref, destroy_disks=False,
block_device_info=block_device_info)
clean_shutdown_vm.assert_called_once_with(self._vmops._session,
self.instance, vm_ref)
self.assertEqual(0, hard_shutdown_vm.call_count)
@mock.patch.object(vm_utils, 'destroy_vm')
@mock.patch.object(vm_utils, 'clean_shutdown_vm', return_value=False)
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
def test_clean_shutdown_with_bdm_failed_on_destroy(self, hard_shutdown_vm,
clean_shutdown_vm, destroy_vm):
vm_ref = 'vm_ref'
block_device_info = {'block_device_mapping': ['fake']}
self._vmops._destroy(self.instance, vm_ref, destroy_disks=False,
block_device_info=block_device_info)
clean_shutdown_vm.assert_called_once_with(self._vmops._session,
self.instance, vm_ref)
hard_shutdown_vm.assert_called_once_with(self._vmops._session,
self.instance, vm_ref)
@mock.patch.object(vm_utils, 'try_auto_configure_disk')
@mock.patch.object(vm_utils, 'create_vbd',
side_effect=test.TestingException)
def test_attach_disks_rescue_auto_disk_config_false(self, create_vbd,
try_auto_config):
ctxt = context.RequestContext('user', 'project')
instance = fake_instance.fake_instance_obj(ctxt)
image_meta = objects.ImageMeta.from_dict(
{'properties': {'auto_disk_config': 'false'}})
vdis = {'root': {'ref': 'fake-ref'}}
self.assertRaises(test.TestingException, self._vmops._attach_disks,
instance, image_meta=image_meta, vm_ref=None,
name_label=None, vdis=vdis, disk_image_type='fake',
network_info=[], rescue=True)
self.assertFalse(try_auto_config.called)
@mock.patch.object(vm_utils, 'try_auto_configure_disk')
@mock.patch.object(vm_utils, 'create_vbd',
side_effect=test.TestingException)
def test_attach_disks_rescue_auto_disk_config_true(self, create_vbd,
try_auto_config):
ctxt = context.RequestContext('user', 'project')
instance = fake_instance.fake_instance_obj(ctxt)
image_meta = objects.ImageMeta.from_dict(
{'properties': {'auto_disk_config': 'true'}})
vdis = {'root': {'ref': 'fake-ref'}}
self.assertRaises(test.TestingException, self._vmops._attach_disks,
instance, image_meta=image_meta, vm_ref=None,
name_label=None, vdis=vdis, disk_image_type='fake',
network_info=[], rescue=True)
try_auto_config.assert_called_once_with(self._vmops._session,
'fake-ref', instance.flavor.root_gb)
class InjectAutoDiskConfigTestCase(VMOpsTestBase):
def test_inject_auto_disk_config_when_present(self):
vm, vm_ref = self.create_vm("dummy")
instance = {"name": "dummy", "uuid": "1234", "auto_disk_config": True}
self.vmops._inject_auto_disk_config(instance, vm_ref)
xenstore_data = vm['xenstore_data']
self.assertEqual(xenstore_data['vm-data/auto-disk-config'], 'True')
def test_inject_auto_disk_config_none_as_false(self):
vm, vm_ref = self.create_vm("dummy")
instance = {"name": "dummy", "uuid": "1234", "auto_disk_config": None}
self.vmops._inject_auto_disk_config(instance, vm_ref)
xenstore_data = vm['xenstore_data']
self.assertEqual(xenstore_data['vm-data/auto-disk-config'], 'False')
class GetConsoleOutputTestCase(VMOpsTestBase):
def test_get_console_output_works(self):
self.mox.StubOutWithMock(self.vmops, '_get_last_dom_id')
instance = {"name": "dummy"}
self.vmops._get_last_dom_id(instance, check_rescue=True).AndReturn(42)
self.mox.ReplayAll()
self.assertEqual("dom_id: 42", self.vmops.get_console_output(instance))
def test_get_console_output_throws_nova_exception(self):
self.mox.StubOutWithMock(self.vmops, '_get_last_dom_id')
instance = {"name": "dummy"}
# dom_id=0 used to trigger exception in fake XenAPI
self.vmops._get_last_dom_id(instance, check_rescue=True).AndReturn(0)
self.mox.ReplayAll()
self.assertRaises(exception.NovaException,
self.vmops.get_console_output, instance)
def test_get_dom_id_works(self):
instance = {"name": "dummy"}
vm, vm_ref = self.create_vm("dummy")
self.assertEqual(vm["domid"], self.vmops._get_dom_id(instance))
def test_get_dom_id_works_with_rescue_vm(self):
instance = {"name": "dummy"}
vm, vm_ref = self.create_vm("dummy-rescue")
self.assertEqual(vm["domid"],
self.vmops._get_dom_id(instance, check_rescue=True))
def test_get_dom_id_raises_not_found(self):
instance = {"name": "dummy"}
self.create_vm("not-dummy")
self.assertRaises(exception.NotFound, self.vmops._get_dom_id, instance)
def test_get_dom_id_works_with_vmref(self):
vm, vm_ref = self.create_vm("dummy")
self.assertEqual(vm["domid"],
self.vmops._get_dom_id(vm_ref=vm_ref))
class SpawnTestCase(VMOpsTestBase):
def _stub_out_common(self):
self.mox.StubOutWithMock(self.vmops, '_ensure_instance_name_unique')
self.mox.StubOutWithMock(self.vmops, '_ensure_enough_free_mem')
self.mox.StubOutWithMock(self.vmops, '_update_instance_progress')
self.mox.StubOutWithMock(vm_utils, 'determine_disk_image_type')
self.mox.StubOutWithMock(self.vmops, '_get_vdis_for_instance')
self.mox.StubOutWithMock(vm_utils, 'safe_destroy_vdis')
self.mox.StubOutWithMock(self.vmops._volumeops,
'safe_cleanup_from_vdis')
self.mox.StubOutWithMock(self.vmops, '_resize_up_vdis')
self.mox.StubOutWithMock(vm_utils,
'create_kernel_and_ramdisk')
self.mox.StubOutWithMock(vm_utils, 'destroy_kernel_ramdisk')
self.mox.StubOutWithMock(self.vmops, '_create_vm_record')
self.mox.StubOutWithMock(self.vmops, '_destroy')
self.mox.StubOutWithMock(self.vmops, '_attach_disks')
self.mox.StubOutWithMock(pci_manager, 'get_instance_pci_devs')
self.mox.StubOutWithMock(vm_utils, 'set_other_config_pci')
self.mox.StubOutWithMock(self.vmops, '_attach_orig_disks')
self.mox.StubOutWithMock(self.vmops, 'inject_network_info')
self.mox.StubOutWithMock(self.vmops, '_inject_hostname')
self.mox.StubOutWithMock(self.vmops, '_inject_instance_metadata')
self.mox.StubOutWithMock(self.vmops, '_inject_auto_disk_config')
self.mox.StubOutWithMock(self.vmops, '_file_inject_vm_settings')
self.mox.StubOutWithMock(self.vmops, '_create_vifs')
self.mox.StubOutWithMock(self.vmops.firewall_driver,
'setup_basic_filtering')
self.mox.StubOutWithMock(self.vmops.firewall_driver,
'prepare_instance_filter')
self.mox.StubOutWithMock(self.vmops, '_start')
self.mox.StubOutWithMock(self.vmops, '_wait_for_instance_to_start')
self.mox.StubOutWithMock(self.vmops,
'_configure_new_instance_with_agent')
self.mox.StubOutWithMock(self.vmops, '_remove_hostname')
self.mox.StubOutWithMock(self.vmops.firewall_driver,
'apply_instance_filter')
self.mox.StubOutWithMock(self.vmops, '_update_last_dom_id')
def _test_spawn(self, name_label_param=None, block_device_info_param=None,
rescue=False, include_root_vdi=True, throw_exception=None,
attach_pci_dev=False):
self._stub_out_common()
instance = {"name": "dummy", "uuid": "fake_uuid"}
name_label = name_label_param
if name_label is None:
name_label = "dummy"
image_meta = objects.ImageMeta.from_dict({"id": "image_id"})
context = "context"
session = self.vmops._session
injected_files = "fake_files"
admin_password = "password"
network_info = "net_info"
steps = 10
if rescue:
steps += 1
block_device_info = block_device_info_param
if block_device_info and not block_device_info['root_device_name']:
block_device_info = dict(block_device_info_param)
block_device_info['root_device_name'] = \
self.vmops.default_root_dev
di_type = "di_type"
vm_utils.determine_disk_image_type(image_meta).AndReturn(di_type)
step = 1
self.vmops._update_instance_progress(context, instance, step, steps)
vdis = {"other": {"ref": "fake_ref_2", "osvol": True}}
if include_root_vdi:
vdis["root"] = {"ref": "fake_ref"}
self.vmops._get_vdis_for_instance(context, instance,
name_label, image_meta, di_type,
block_device_info).AndReturn(vdis)
self.vmops._resize_up_vdis(instance, vdis)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
kernel_file = "kernel"
ramdisk_file = "ramdisk"
vm_utils.create_kernel_and_ramdisk(context, session,
instance, name_label).AndReturn((kernel_file, ramdisk_file))
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
vm_ref = "fake_vm_ref"
self.vmops._ensure_instance_name_unique(name_label)
self.vmops._ensure_enough_free_mem(instance)
self.vmops._create_vm_record(context, instance, name_label,
di_type, kernel_file,
ramdisk_file, image_meta, rescue).AndReturn(vm_ref)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
self.vmops._attach_disks(instance, image_meta, vm_ref, name_label,
vdis, di_type, network_info, rescue,
admin_password, injected_files)
if attach_pci_dev:
fake_dev = {
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': None,
'id': 1,
'compute_node_id': 1,
'address': '00:00.0',
'vendor_id': '1234',
'product_id': 'abcd',
'dev_type': fields.PciDeviceType.STANDARD,
'status': 'available',
'dev_id': 'devid',
'label': 'label',
'instance_uuid': None,
'extra_info': '{}',
}
pci_manager.get_instance_pci_devs(instance).AndReturn([fake_dev])
vm_utils.set_other_config_pci(self.vmops._session,
vm_ref,
"0/0000:00:00.0")
else:
pci_manager.get_instance_pci_devs(instance).AndReturn([])
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
self.vmops._inject_instance_metadata(instance, vm_ref)
self.vmops._inject_auto_disk_config(instance, vm_ref)
self.vmops._inject_hostname(instance, vm_ref, rescue)
self.vmops._file_inject_vm_settings(instance, vm_ref, vdis,
network_info)
self.vmops.inject_network_info(instance, network_info, vm_ref)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
self.vmops._create_vifs(instance, vm_ref, network_info)
self.vmops.firewall_driver.setup_basic_filtering(instance,
network_info).AndRaise(NotImplementedError)
self.vmops.firewall_driver.prepare_instance_filter(instance,
network_info)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
if rescue:
self.vmops._attach_orig_disks(instance, vm_ref)
step += 1
self.vmops._update_instance_progress(context, instance, step,
steps)
self.vmops._start(instance, vm_ref)
self.vmops._wait_for_instance_to_start(instance, vm_ref)
self.vmops._update_last_dom_id(vm_ref)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
self.vmops._configure_new_instance_with_agent(instance, vm_ref,
injected_files, admin_password)
self.vmops._remove_hostname(instance, vm_ref)
step += 1
self.vmops._update_instance_progress(context, instance, step, steps)
self.vmops.firewall_driver.apply_instance_filter(instance,
network_info)
step += 1
last_call = self.vmops._update_instance_progress(context, instance,
step, steps)
if throw_exception:
last_call.AndRaise(throw_exception)
self.vmops._destroy(instance, vm_ref, network_info=network_info)
vm_utils.destroy_kernel_ramdisk(self.vmops._session, instance,
kernel_file, ramdisk_file)
vm_utils.safe_destroy_vdis(self.vmops._session, ["fake_ref"])
self.vmops._volumeops.safe_cleanup_from_vdis(["fake_ref_2"])
self.mox.ReplayAll()
self.vmops.spawn(context, instance, image_meta, injected_files,
admin_password, network_info,
block_device_info_param, name_label_param, rescue)
def test_spawn(self):
self._test_spawn()
def test_spawn_with_alternate_options(self):
self._test_spawn(include_root_vdi=False, rescue=True,
name_label_param="bob",
block_device_info_param={"root_device_name": ""})
def test_spawn_with_pci_available_on_the_host(self):
self._test_spawn(attach_pci_dev=True)
def test_spawn_performs_rollback_and_throws_exception(self):
self.assertRaises(test.TestingException, self._test_spawn,
throw_exception=test.TestingException())
def _test_finish_migration(self, power_on=True, resize_instance=True,
throw_exception=None, booted_from_volume=False):
self._stub_out_common()
self.mox.StubOutWithMock(volumeops.VolumeOps, "connect_volume")
self.mox.StubOutWithMock(self.vmops._session, 'call_xenapi')
self.mox.StubOutWithMock(vm_utils, "import_all_migrated_disks")
self.mox.StubOutWithMock(self.vmops, "_attach_mapped_block_devices")
context = "context"
migration = {}
name_label = "dummy"
instance = {"name": name_label, "uuid": "fake_uuid",
"root_device_name": "/dev/xvda"}
disk_info = "disk_info"
network_info = "net_info"
image_meta = objects.ImageMeta.from_dict({"id": "image_id"})
block_device_info = {}
import_root = True
if booted_from_volume:
block_device_info = {'block_device_mapping': [
{'mount_device': '/dev/xvda',
'connection_info': {'data': 'fake-data'}}]}
import_root = False
volumeops.VolumeOps.connect_volume(
{'data': 'fake-data'}).AndReturn(('sr', 'vol-vdi-uuid'))
self.vmops._session.call_xenapi('VDI.get_by_uuid',
'vol-vdi-uuid').AndReturn('vol-vdi-ref')
session = self.vmops._session
self.vmops._ensure_instance_name_unique(name_label)
self.vmops._ensure_enough_free_mem(instance)
di_type = "di_type"
vm_utils.determine_disk_image_type(image_meta).AndReturn(di_type)
root_vdi = {"ref": "fake_ref"}
ephemeral_vdi = {"ref": "fake_ref_e"}
vdis = {"root": root_vdi, "ephemerals": {4: ephemeral_vdi}}
vm_utils.import_all_migrated_disks(self.vmops._session, instance,
import_root=import_root).AndReturn(vdis)
kernel_file = "kernel"
ramdisk_file = "ramdisk"
vm_utils.create_kernel_and_ramdisk(context, session,
instance, name_label).AndReturn((kernel_file, ramdisk_file))
vm_ref = "fake_vm_ref"
rescue = False
self.vmops._create_vm_record(context, instance, name_label,
di_type, kernel_file,
ramdisk_file, image_meta, rescue).AndReturn(vm_ref)
if resize_instance:
self.vmops._resize_up_vdis(instance, vdis)
self.vmops._attach_disks(instance, image_meta, vm_ref, name_label,
vdis, di_type, network_info, False, None, None)
self.vmops._attach_mapped_block_devices(instance, block_device_info)
pci_manager.get_instance_pci_devs(instance).AndReturn([])
self.vmops._inject_instance_metadata(instance, vm_ref)
self.vmops._inject_auto_disk_config(instance, vm_ref)
self.vmops._file_inject_vm_settings(instance, vm_ref, vdis,
network_info)
self.vmops.inject_network_info(instance, network_info, vm_ref)
self.vmops._create_vifs(instance, vm_ref, network_info)
self.vmops.firewall_driver.setup_basic_filtering(instance,
network_info).AndRaise(NotImplementedError)
self.vmops.firewall_driver.prepare_instance_filter(instance,
network_info)
if power_on:
self.vmops._start(instance, vm_ref)
self.vmops._wait_for_instance_to_start(instance, vm_ref)
self.vmops._update_last_dom_id(vm_ref)
self.vmops.firewall_driver.apply_instance_filter(instance,
network_info)
last_call = self.vmops._update_instance_progress(context, instance,
step=5, total_steps=5)
if throw_exception:
last_call.AndRaise(throw_exception)
self.vmops._destroy(instance, vm_ref, network_info=network_info)
vm_utils.destroy_kernel_ramdisk(self.vmops._session, instance,
kernel_file, ramdisk_file)
vm_utils.safe_destroy_vdis(self.vmops._session,
["fake_ref_e", "fake_ref"])
self.mox.ReplayAll()
self.vmops.finish_migration(context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info, power_on)
def test_finish_migration(self):
self._test_finish_migration()
def test_finish_migration_no_power_on(self):
self._test_finish_migration(power_on=False, resize_instance=False)
def test_finish_migration_booted_from_volume(self):
self._test_finish_migration(booted_from_volume=True)
def test_finish_migrate_performs_rollback_on_error(self):
self.assertRaises(test.TestingException, self._test_finish_migration,
power_on=False, resize_instance=False,
throw_exception=test.TestingException())
def test_remove_hostname(self):
vm, vm_ref = self.create_vm("dummy")
instance = {"name": "dummy", "uuid": "1234", "auto_disk_config": None}
self.mox.StubOutWithMock(self._session, 'call_xenapi')
self._session.call_xenapi("VM.remove_from_xenstore_data", vm_ref,
"vm-data/hostname")
self.mox.ReplayAll()
self.vmops._remove_hostname(instance, vm_ref)
self.mox.VerifyAll()
def test_reset_network(self):
class mock_agent(object):
def __init__(self):
self.called = False
def resetnetwork(self):
self.called = True
vm, vm_ref = self.create_vm("dummy")
instance = {"name": "dummy", "uuid": "1234", "auto_disk_config": None}
agent = mock_agent()
self.mox.StubOutWithMock(self.vmops, 'agent_enabled')
self.mox.StubOutWithMock(self.vmops, '_get_agent')
self.mox.StubOutWithMock(self.vmops, '_inject_hostname')
self.mox.StubOutWithMock(self.vmops, '_remove_hostname')
self.vmops.agent_enabled(instance).AndReturn(True)
self.vmops._get_agent(instance, vm_ref).AndReturn(agent)
self.vmops._inject_hostname(instance, vm_ref, False)
self.vmops._remove_hostname(instance, vm_ref)
self.mox.ReplayAll()
self.vmops.reset_network(instance)
self.assertTrue(agent.called)
self.mox.VerifyAll()
def test_inject_hostname(self):
instance = {"hostname": "dummy", "os_type": "fake", "uuid": "uuid"}
vm_ref = "vm_ref"
self.mox.StubOutWithMock(self.vmops, '_add_to_param_xenstore')
self.vmops._add_to_param_xenstore(vm_ref, 'vm-data/hostname', 'dummy')
self.mox.ReplayAll()
self.vmops._inject_hostname(instance, vm_ref, rescue=False)
def test_inject_hostname_with_rescue_prefix(self):
instance = {"hostname": "dummy", "os_type": "fake", "uuid": "uuid"}
vm_ref = "vm_ref"
self.mox.StubOutWithMock(self.vmops, '_add_to_param_xenstore')
self.vmops._add_to_param_xenstore(vm_ref, 'vm-data/hostname',
'RESCUE-dummy')
self.mox.ReplayAll()
self.vmops._inject_hostname(instance, vm_ref, rescue=True)
def test_inject_hostname_with_windows_name_truncation(self):
instance = {"hostname": "dummydummydummydummydummy",
"os_type": "windows", "uuid": "uuid"}
vm_ref = "vm_ref"
self.mox.StubOutWithMock(self.vmops, '_add_to_param_xenstore')
self.vmops._add_to_param_xenstore(vm_ref, 'vm-data/hostname',
'RESCUE-dummydum')
self.mox.ReplayAll()
self.vmops._inject_hostname(instance, vm_ref, rescue=True)
def test_wait_for_instance_to_start(self):
instance = {"uuid": "uuid"}
vm_ref = "vm_ref"
self.mox.StubOutWithMock(vm_utils, 'get_power_state')
self.mox.StubOutWithMock(greenthread, 'sleep')
vm_utils.get_power_state(self._session, vm_ref).AndReturn(
power_state.SHUTDOWN)
greenthread.sleep(0.5)
vm_utils.get_power_state(self._session, vm_ref).AndReturn(
power_state.RUNNING)
self.mox.ReplayAll()
self.vmops._wait_for_instance_to_start(instance, vm_ref)
def test_attach_orig_disks(self):
instance = {"name": "dummy"}
vm_ref = "vm_ref"
vbd_refs = {vmops.DEVICE_ROOT: "vdi_ref"}
self.mox.StubOutWithMock(vm_utils, 'lookup')
self.mox.StubOutWithMock(self.vmops, '_find_vdi_refs')
self.mox.StubOutWithMock(vm_utils, 'create_vbd')
vm_utils.lookup(self.vmops._session, "dummy").AndReturn("ref")
self.vmops._find_vdi_refs("ref", exclude_volumes=True).AndReturn(
vbd_refs)
vm_utils.create_vbd(self.vmops._session, vm_ref, "vdi_ref",
vmops.DEVICE_RESCUE, bootable=False)
self.mox.ReplayAll()
self.vmops._attach_orig_disks(instance, vm_ref)
def test_agent_update_setup(self):
# agent updates need to occur after networking is configured
instance = {'name': 'betelgeuse',
'uuid': '1-2-3-4-5-6'}
vm_ref = 'vm_ref'
agent = xenapi_agent.XenAPIBasedAgent(self.vmops._session,
self.vmops._virtapi, instance, vm_ref)
self.mox.StubOutWithMock(xenapi_agent, 'should_use_agent')
self.mox.StubOutWithMock(self.vmops, '_get_agent')
self.mox.StubOutWithMock(agent, 'get_version')
self.mox.StubOutWithMock(agent, 'resetnetwork')
self.mox.StubOutWithMock(agent, 'update_if_needed')
xenapi_agent.should_use_agent(instance).AndReturn(True)
self.vmops._get_agent(instance, vm_ref).AndReturn(agent)
agent.get_version().AndReturn('1.2.3')
agent.resetnetwork()
agent.update_if_needed('1.2.3')
self.mox.ReplayAll()
self.vmops._configure_new_instance_with_agent(instance, vm_ref,
None, None)
class DestroyTestCase(VMOpsTestBase):
def setUp(self):
super(DestroyTestCase, self).setUp()
self.context = context.RequestContext(user_id=None, project_id=None)
self.instance = fake_instance.fake_instance_obj(self.context)
@mock.patch.object(vm_utils, 'lookup', side_effect=[None, None])
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
@mock.patch.object(volume_utils, 'find_sr_by_uuid')
@mock.patch.object(volume_utils, 'forget_sr')
def test_no_vm_no_bdm(self, forget_sr, find_sr_by_uuid, hard_shutdown_vm,
lookup):
self.vmops.destroy(self.instance, 'network_info',
{'block_device_mapping': []})
self.assertEqual(0, find_sr_by_uuid.call_count)
self.assertEqual(0, forget_sr.call_count)
self.assertEqual(0, hard_shutdown_vm.call_count)
@mock.patch.object(vm_utils, 'lookup', side_effect=[None, None])
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
@mock.patch.object(volume_utils, 'find_sr_by_uuid', return_value=None)
@mock.patch.object(volume_utils, 'forget_sr')
def test_no_vm_orphaned_volume_no_sr(self, forget_sr, find_sr_by_uuid,
hard_shutdown_vm, lookup):
self.vmops.destroy(self.instance, 'network_info',
{'block_device_mapping': [{'connection_info':
{'data': {'volume_id': 'fake-uuid'}}}]})
find_sr_by_uuid.assert_called_once_with(self.vmops._session,
'FA15E-D15C-fake-uuid')
self.assertEqual(0, forget_sr.call_count)
self.assertEqual(0, hard_shutdown_vm.call_count)
@mock.patch.object(vm_utils, 'lookup', side_effect=[None, None])
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
@mock.patch.object(volume_utils, 'find_sr_by_uuid', return_value='sr_ref')
@mock.patch.object(volume_utils, 'forget_sr')
def test_no_vm_orphaned_volume_old_sr(self, forget_sr, find_sr_by_uuid,
hard_shutdown_vm, lookup):
self.vmops.destroy(self.instance, 'network_info',
{'block_device_mapping': [{'connection_info':
{'data': {'volume_id': 'fake-uuid'}}}]})
find_sr_by_uuid.assert_called_once_with(self.vmops._session,
'FA15E-D15C-fake-uuid')
forget_sr.assert_called_once_with(self.vmops._session, 'sr_ref')
self.assertEqual(0, hard_shutdown_vm.call_count)
@mock.patch.object(vm_utils, 'lookup', side_effect=[None, None])
@mock.patch.object(vm_utils, 'hard_shutdown_vm')
@mock.patch.object(volume_utils, 'find_sr_by_uuid',
side_effect=[None, 'sr_ref'])
@mock.patch.object(volume_utils, 'forget_sr')
@mock.patch.object(uuid, 'uuid5', return_value='fake-uuid')
def test_no_vm_orphaned_volume(self, uuid5, forget_sr,
find_sr_by_uuid, hard_shutdown_vm, lookup):
fake_data = {'volume_id': 'fake-uuid',
'target_portal': 'host:port',
'target_iqn': 'iqn'}
self.vmops.destroy(self.instance, 'network_info',
{'block_device_mapping': [{'connection_info':
{'data': fake_data}}]})
call1 = mock.call(self.vmops._session, 'FA15E-D15C-fake-uuid')
call2 = mock.call(self.vmops._session, 'fake-uuid')
uuid5.assert_called_once_with(volume_utils.SR_NAMESPACE,
'host/port/iqn')
find_sr_by_uuid.assert_has_calls([call1, call2])
forget_sr.assert_called_once_with(self.vmops._session, 'sr_ref')
self.assertEqual(0, hard_shutdown_vm.call_count)
@mock.patch.object(vmops.VMOps, '_update_instance_progress')
@mock.patch.object(vmops.VMOps, '_get_vm_opaque_ref')
@mock.patch.object(vm_utils, 'get_sr_path')
@mock.patch.object(vmops.VMOps, '_detach_block_devices_from_orig_vm')
@mock.patch.object(vmops.VMOps, '_migrate_disk_resizing_down')
@mock.patch.object(vmops.VMOps, '_migrate_disk_resizing_up')
class MigrateDiskAndPowerOffTestCase(VMOpsTestBase):
def setUp(self):
super(MigrateDiskAndPowerOffTestCase, self).setUp()
self.context = context.RequestContext('user', 'project')
def test_migrate_disk_and_power_off_works_down(self,
migrate_up, migrate_down, *mocks):
instance = {"root_gb": 2, "ephemeral_gb": 0, "uuid": "uuid"}
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=1,
ephemeral_gb=0)
self.vmops.migrate_disk_and_power_off(None, instance, None,
flavor, None)
self.assertFalse(migrate_up.called)
self.assertTrue(migrate_down.called)
def test_migrate_disk_and_power_off_works_up(self,
migrate_up, migrate_down, *mocks):
instance = {"root_gb": 1, "ephemeral_gb": 1, "uuid": "uuid"}
flavor = fake_flavor.fake_flavor_obj(self.context, root_gb=2,
ephemeral_gb=2)
self.vmops.migrate_disk_and_power_off(None, instance, None,
flavor, None)
self.assertFalse(migrate_down.called)
self.assertTrue(migrate_up.called)
def test_migrate_disk_and_power_off_resize_down_ephemeral_fails(self,
migrate_up, migrate_down, *mocks):
instance = {"ephemeral_gb": 2}
flavor = fake_flavor.fake_flavor_obj(self.context, ephemeral_gb=1)
self.assertRaises(exception.ResizeError,
self.vmops.migrate_disk_and_power_off,
None, instance, None, flavor, None)
@mock.patch.object(vm_utils, 'get_vdi_for_vm_safely')
@mock.patch.object(vm_utils, 'migrate_vhd')
@mock.patch.object(vmops.VMOps, '_resize_ensure_vm_is_shutdown')
@mock.patch.object(vm_utils, 'get_all_vdi_uuids_for_vm')
@mock.patch.object(vmops.VMOps, '_update_instance_progress')
@mock.patch.object(vmops.VMOps, '_apply_orig_vm_name_label')
class MigrateDiskResizingUpTestCase(VMOpsTestBase):
def _fake_snapshot_attached_here(self, session, instance, vm_ref, label,
userdevice, post_snapshot_callback):
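        # stand-in for vm_utils's snapshot context manager: yields the VHD
        # uuid chain (leaf first) that the resize-up path is expected to
        # migrate for the given userdevice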
self.assertIsInstance(instance, dict)
if userdevice == '0':
self.assertEqual("vm_ref", vm_ref)
self.assertEqual("fake-snapshot", label)
yield ["leaf", "parent", "grandp"]
else:
leaf = userdevice + "-leaf"
parent = userdevice + "-parent"
yield [leaf, parent]
@mock.patch.object(volume_utils, 'is_booted_from_volume',
return_value=False)
def test_migrate_disk_resizing_up_works_no_ephemeral(self,
mock_is_booted_from_volume,
mock_apply_orig, mock_update_progress, mock_get_all_vdi_uuids,
mock_shutdown, mock_migrate_vhd, mock_get_vdi_for_vm):
context = "ctxt"
instance = {"name": "fake", "uuid": "uuid"}
dest = "dest"
vm_ref = "vm_ref"
sr_path = "sr_path"
mock_get_all_vdi_uuids.return_value = None
mock_get_vdi_for_vm.return_value = ({}, {"uuid": "root"})
with mock.patch.object(vm_utils, '_snapshot_attached_here_impl',
self._fake_snapshot_attached_here):
self.vmops._migrate_disk_resizing_up(context, instance, dest,
vm_ref, sr_path)
mock_get_all_vdi_uuids.assert_called_once_with(self.vmops._session,
vm_ref, min_userdevice=4)
mock_apply_orig.assert_called_once_with(instance, vm_ref)
mock_shutdown.assert_called_once_with(instance, vm_ref)
m_vhd_expected = [mock.call(self.vmops._session, instance, "parent",
dest, sr_path, 1),
mock.call(self.vmops._session, instance, "grandp",
dest, sr_path, 2),
mock.call(self.vmops._session, instance, "root",
dest, sr_path, 0)]
self.assertEqual(m_vhd_expected, mock_migrate_vhd.call_args_list)
prog_expected = [
mock.call(context, instance, 1, 5),
mock.call(context, instance, 2, 5),
mock.call(context, instance, 3, 5),
mock.call(context, instance, 4, 5)
# 5/5: step to be executed by finish migration.
]
self.assertEqual(prog_expected, mock_update_progress.call_args_list)
@mock.patch.object(volume_utils, 'is_booted_from_volume',
return_value=False)
def test_migrate_disk_resizing_up_works_with_two_ephemeral(self,
mock_is_booted_from_volume,
mock_apply_orig, mock_update_progress, mock_get_all_vdi_uuids,
mock_shutdown, mock_migrate_vhd, mock_get_vdi_for_vm):
context = "ctxt"
instance = {"name": "fake", "uuid": "uuid"}
dest = "dest"
vm_ref = "vm_ref"
sr_path = "sr_path"
mock_get_all_vdi_uuids.return_value = ["vdi-eph1", "vdi-eph2"]
mock_get_vdi_for_vm.side_effect = [({}, {"uuid": "root"}),
({}, {"uuid": "4-root"}),
({}, {"uuid": "5-root"})]
with mock.patch.object(vm_utils, '_snapshot_attached_here_impl',
self._fake_snapshot_attached_here):
self.vmops._migrate_disk_resizing_up(context, instance, dest,
vm_ref, sr_path)
mock_get_all_vdi_uuids.assert_called_once_with(self.vmops._session,
vm_ref, min_userdevice=4)
mock_apply_orig.assert_called_once_with(instance, vm_ref)
mock_shutdown.assert_called_once_with(instance, vm_ref)
m_vhd_expected = [mock.call(self.vmops._session, instance,
"parent", dest, sr_path, 1),
mock.call(self.vmops._session, instance,
"grandp", dest, sr_path, 2),
mock.call(self.vmops._session, instance,
"4-parent", dest, sr_path, 1, 1),
mock.call(self.vmops._session, instance,
"5-parent", dest, sr_path, 1, 2),
mock.call(self.vmops._session, instance,
"root", dest, sr_path, 0),
mock.call(self.vmops._session, instance,
"4-root", dest, sr_path, 0, 1),
mock.call(self.vmops._session, instance,
"5-root", dest, sr_path, 0, 2)]
self.assertEqual(m_vhd_expected, mock_migrate_vhd.call_args_list)
prog_expected = [
mock.call(context, instance, 1, 5),
mock.call(context, instance, 2, 5),
mock.call(context, instance, 3, 5),
mock.call(context, instance, 4, 5)
# 5/5: step to be executed by finish migration.
]
self.assertEqual(prog_expected, mock_update_progress.call_args_list)
@mock.patch.object(volume_utils, 'is_booted_from_volume',
return_value=True)
def test_migrate_disk_resizing_up_booted_from_volume(self,
mock_is_booted_from_volume,
mock_apply_orig, mock_update_progress, mock_get_all_vdi_uuids,
mock_shutdown, mock_migrate_vhd, mock_get_vdi_for_vm):
context = "ctxt"
instance = {"name": "fake", "uuid": "uuid"}
dest = "dest"
vm_ref = "vm_ref"
sr_path = "sr_path"
mock_get_all_vdi_uuids.return_value = ["vdi-eph1", "vdi-eph2"]
mock_get_vdi_for_vm.side_effect = [({}, {"uuid": "4-root"}),
({}, {"uuid": "5-root"})]
with mock.patch.object(vm_utils, '_snapshot_attached_here_impl',
self._fake_snapshot_attached_here):
self.vmops._migrate_disk_resizing_up(context, instance, dest,
vm_ref, sr_path)
mock_get_all_vdi_uuids.assert_called_once_with(self.vmops._session,
vm_ref, min_userdevice=4)
mock_apply_orig.assert_called_once_with(instance, vm_ref)
mock_shutdown.assert_called_once_with(instance, vm_ref)
m_vhd_expected = [mock.call(self.vmops._session, instance,
"4-parent", dest, sr_path, 1, 1),
mock.call(self.vmops._session, instance,
"5-parent", dest, sr_path, 1, 2),
mock.call(self.vmops._session, instance,
"4-root", dest, sr_path, 0, 1),
mock.call(self.vmops._session, instance,
"5-root", dest, sr_path, 0, 2)]
self.assertEqual(m_vhd_expected, mock_migrate_vhd.call_args_list)
prog_expected = [
mock.call(context, instance, 1, 5),
mock.call(context, instance, 2, 5),
mock.call(context, instance, 3, 5),
mock.call(context, instance, 4, 5)
# 5/5: step to be executed by finish migration.
]
self.assertEqual(prog_expected, mock_update_progress.call_args_list)
@mock.patch.object(vmops.VMOps, '_restore_orig_vm_and_cleanup_orphan')
@mock.patch.object(volume_utils, 'is_booted_from_volume',
return_value=False)
def test_migrate_disk_resizing_up_rollback(self,
mock_is_booted_from_volume,
mock_restore,
mock_apply_orig, mock_update_progress, mock_get_all_vdi_uuids,
mock_shutdown, mock_migrate_vhd, mock_get_vdi_for_vm):
context = "ctxt"
instance = {"name": "fake", "uuid": "fake"}
dest = "dest"
vm_ref = "vm_ref"
sr_path = "sr_path"
mock_migrate_vhd.side_effect = test.TestingException
mock_restore.side_effect = test.TestingException
with mock.patch.object(vm_utils, '_snapshot_attached_here_impl',
self._fake_snapshot_attached_here):
self.assertRaises(exception.InstanceFaultRollback,
self.vmops._migrate_disk_resizing_up,
context, instance, dest, vm_ref, sr_path)
mock_apply_orig.assert_called_once_with(instance, vm_ref)
mock_restore.assert_called_once_with(instance)
mock_migrate_vhd.assert_called_once_with(self.vmops._session,
instance, "parent", dest, sr_path, 1)
class CreateVMRecordTestCase(VMOpsTestBase):
@mock.patch.object(vm_utils, 'determine_vm_mode')
@mock.patch.object(vm_utils, 'get_vm_device_id')
@mock.patch.object(vm_utils, 'create_vm')
def test_create_vm_record_with_vm_device_id(self, mock_create_vm,
mock_get_vm_device_id, mock_determine_vm_mode):
context = "context"
instance = objects.Instance(vm_mode="vm_mode", uuid="uuid123")
name_label = "dummy"
disk_image_type = "vhd"
kernel_file = "kernel"
ramdisk_file = "ram"
device_id = "0002"
image_properties = {"xenapi_device_id": device_id}
image_meta = objects.ImageMeta.from_dict(
{"properties": image_properties})
rescue = False
session = "session"
self.vmops._session = session
mock_get_vm_device_id.return_value = device_id
mock_determine_vm_mode.return_value = "vm_mode"
self.vmops._create_vm_record(context, instance, name_label,
disk_image_type, kernel_file, ramdisk_file, image_meta, rescue)
mock_get_vm_device_id.assert_called_with(session, image_meta)
mock_create_vm.assert_called_with(session, instance, name_label,
kernel_file, ramdisk_file, False, device_id)
class BootableTestCase(VMOpsTestBase):
def setUp(self):
super(BootableTestCase, self).setUp()
self.instance = {"name": "test", "uuid": "fake"}
vm_rec, self.vm_ref = self.create_vm('test')
# sanity check bootlock is initially disabled:
self.assertEqual({}, vm_rec['blocked_operations'])
def _get_blocked(self):
vm_rec = self._session.call_xenapi("VM.get_record", self.vm_ref)
return vm_rec['blocked_operations']
def test_acquire_bootlock(self):
self.vmops._acquire_bootlock(self.vm_ref)
blocked = self._get_blocked()
self.assertIn('start', blocked)
def test_release_bootlock(self):
self.vmops._acquire_bootlock(self.vm_ref)
self.vmops._release_bootlock(self.vm_ref)
blocked = self._get_blocked()
self.assertNotIn('start', blocked)
def test_set_bootable(self):
self.vmops.set_bootable(self.instance, True)
blocked = self._get_blocked()
self.assertNotIn('start', blocked)
def test_set_not_bootable(self):
self.vmops.set_bootable(self.instance, False)
blocked = self._get_blocked()
self.assertIn('start', blocked)
@mock.patch.object(vm_utils, 'update_vdi_virtual_size', autospec=True)
class ResizeVdisTestCase(VMOpsTestBase):
def test_dont_resize_root_volumes_osvol_false(self, mock_resize):
instance = fake_instance.fake_db_instance(root_gb=20)
vdis = {'root': {'osvol': False, 'ref': 'vdi_ref'}}
self.vmops._resize_up_vdis(instance, vdis)
self.assertTrue(mock_resize.called)
def test_dont_resize_root_volumes_osvol_true(self, mock_resize):
instance = fake_instance.fake_db_instance(root_gb=20)
vdis = {'root': {'osvol': True}}
self.vmops._resize_up_vdis(instance, vdis)
self.assertFalse(mock_resize.called)
def test_dont_resize_root_volumes_no_osvol(self, mock_resize):
instance = fake_instance.fake_db_instance(root_gb=20)
vdis = {'root': {}}
self.vmops._resize_up_vdis(instance, vdis)
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, 'get_ephemeral_disk_sizes')
def test_ensure_ephemeral_resize_with_root_volume(self, mock_sizes,
mock_resize):
mock_sizes.return_value = [2000, 1000]
instance = fake_instance.fake_db_instance(root_gb=20, ephemeral_gb=20)
ephemerals = {"4": {"ref": 4}, "5": {"ref": 5}}
vdis = {'root': {'osvol': True, 'ref': 'vdi_ref'},
'ephemerals': ephemerals}
with mock.patch.object(vm_utils, 'generate_single_ephemeral',
autospec=True) as g:
self.vmops._resize_up_vdis(instance, vdis)
self.assertEqual([mock.call(self.vmops._session, instance, 4,
2000),
mock.call(self.vmops._session, instance, 5,
1000)],
mock_resize.call_args_list)
self.assertFalse(g.called)
def test_resize_up_vdis_root(self, mock_resize):
instance = {"root_gb": 20, "ephemeral_gb": 0}
self.vmops._resize_up_vdis(instance, {"root": {"ref": "vdi_ref"}})
mock_resize.assert_called_once_with(self.vmops._session, instance,
"vdi_ref", 20)
def test_resize_up_vdis_zero_disks(self, mock_resize):
instance = {"root_gb": 0, "ephemeral_gb": 0}
self.vmops._resize_up_vdis(instance, {"root": {}})
self.assertFalse(mock_resize.called)
def test_resize_up_vdis_no_vdis_like_initial_spawn(self, mock_resize):
instance = {"root_gb": 0, "ephemeral_gb": 3000}
vdis = {}
self.vmops._resize_up_vdis(instance, vdis)
self.assertFalse(mock_resize.called)
@mock.patch.object(vm_utils, 'get_ephemeral_disk_sizes')
def test_resize_up_vdis_ephemeral(self, mock_sizes, mock_resize):
mock_sizes.return_value = [2000, 1000]
instance = {"root_gb": 0, "ephemeral_gb": 3000}
ephemerals = {"4": {"ref": 4}, "5": {"ref": 5}}
vdis = {"ephemerals": ephemerals}
self.vmops._resize_up_vdis(instance, vdis)
mock_sizes.assert_called_once_with(3000)
expected = [mock.call(self.vmops._session, instance, 4, 2000),
mock.call(self.vmops._session, instance, 5, 1000)]
self.assertEqual(expected, mock_resize.call_args_list)
@mock.patch.object(vm_utils, 'generate_single_ephemeral')
@mock.patch.object(vm_utils, 'get_ephemeral_disk_sizes')
def test_resize_up_vdis_ephemeral_with_generate(self, mock_sizes,
mock_generate,
mock_resize):
mock_sizes.return_value = [2000, 1000]
instance = {"root_gb": 0, "ephemeral_gb": 3000, "uuid": "a"}
ephemerals = {"4": {"ref": 4}}
vdis = {"ephemerals": ephemerals}
self.vmops._resize_up_vdis(instance, vdis)
mock_sizes.assert_called_once_with(3000)
mock_resize.assert_called_once_with(self.vmops._session, instance,
4, 2000)
mock_generate.assert_called_once_with(self.vmops._session, instance,
None, 5, 1000)
@mock.patch.object(vm_utils, 'remove_old_snapshots')
class CleanupFailedSnapshotTestCase(VMOpsTestBase):
def test_post_interrupted_snapshot_cleanup(self, mock_remove):
self.vmops._get_vm_opaque_ref = mock.Mock()
self.vmops._get_vm_opaque_ref.return_value = "vm_ref"
self.vmops.post_interrupted_snapshot_cleanup("context", "instance")
mock_remove.assert_called_once_with(self.vmops._session,
"instance", "vm_ref")
class XenstoreCallsTestCase(VMOpsTestBase):
"""Test cases for Read/Write/Delete/Update xenstore calls
from vmops.
"""
@mock.patch.object(vmops.VMOps, '_make_plugin_call')
def test_read_from_xenstore(self, fake_xapi_call):
fake_xapi_call.return_value = "fake_xapi_return"
fake_instance = {"name": "fake_instance"}
path = "attr/PVAddons/MajorVersion"
self.assertEqual("fake_xapi_return",
self.vmops._read_from_xenstore(fake_instance, path,
vm_ref="vm_ref"))
@mock.patch.object(vmops.VMOps, '_make_plugin_call')
def test_read_from_xenstore_ignore_missing_path(self, fake_xapi_call):
fake_instance = {"name": "fake_instance"}
path = "attr/PVAddons/MajorVersion"
self.vmops._read_from_xenstore(fake_instance, path, vm_ref="vm_ref")
fake_xapi_call.assert_called_once_with('xenstore.py', 'read_record',
fake_instance, vm_ref="vm_ref",
path=path,
ignore_missing_path='True')
@mock.patch.object(vmops.VMOps, '_make_plugin_call')
def test_read_from_xenstore_missing_path(self, fake_xapi_call):
fake_instance = {"name": "fake_instance"}
path = "attr/PVAddons/MajorVersion"
self.vmops._read_from_xenstore(fake_instance, path, vm_ref="vm_ref",
ignore_missing_path=False)
fake_xapi_call.assert_called_once_with('xenstore.py', 'read_record',
fake_instance, vm_ref="vm_ref",
path=path,
ignore_missing_path='False')
class LiveMigrateFakeVersionTestCase(VMOpsTestBase):
@mock.patch.object(vmops.VMOps, '_pv_device_reported')
@mock.patch.object(vmops.VMOps, '_pv_driver_version_reported')
@mock.patch.object(vmops.VMOps, '_write_fake_pv_version')
def test_ensure_pv_driver_info_for_live_migration(
self,
mock_write_fake_pv_version,
mock_pv_driver_version_reported,
mock_pv_device_reported):
mock_pv_device_reported.return_value = True
mock_pv_driver_version_reported.return_value = False
fake_instance = {"name": "fake_instance"}
self.vmops._ensure_pv_driver_info_for_live_migration(fake_instance,
"vm_rec")
mock_write_fake_pv_version.assert_called_once_with(fake_instance,
"vm_rec")
@mock.patch.object(vmops.VMOps, '_read_from_xenstore')
def test_pv_driver_version_reported_None(self, fake_read_from_xenstore):
fake_read_from_xenstore.return_value = '"None"'
fake_instance = {"name": "fake_instance"}
self.assertFalse(self.vmops._pv_driver_version_reported(fake_instance,
"vm_ref"))
@mock.patch.object(vmops.VMOps, '_read_from_xenstore')
def test_pv_driver_version_reported(self, fake_read_from_xenstore):
fake_read_from_xenstore.return_value = '6.2.0'
fake_instance = {"name": "fake_instance"}
self.assertTrue(self.vmops._pv_driver_version_reported(fake_instance,
"vm_ref"))
@mock.patch.object(vmops.VMOps, '_read_from_xenstore')
def test_pv_device_reported(self, fake_read_from_xenstore):
with mock.patch.object(self._session.VM, 'get_record') as fake_vm_rec:
fake_vm_rec.return_value = {'VIFs': 'fake-vif-object'}
with mock.patch.object(self._session, 'call_xenapi') as fake_call:
fake_call.return_value = {'device': '0'}
fake_read_from_xenstore.return_value = '4'
fake_instance = {"name": "fake_instance"}
self.assertTrue(self.vmops._pv_device_reported(fake_instance,
"vm_ref"))
@mock.patch.object(vmops.VMOps, '_read_from_xenstore')
def test_pv_device_not_reported(self, fake_read_from_xenstore):
with mock.patch.object(self._session.VM, 'get_record') as fake_vm_rec:
fake_vm_rec.return_value = {'VIFs': 'fake-vif-object'}
with mock.patch.object(self._session, 'call_xenapi') as fake_call:
fake_call.return_value = {'device': '0'}
fake_read_from_xenstore.return_value = '0'
fake_instance = {"name": "fake_instance"}
self.assertFalse(self.vmops._pv_device_reported(fake_instance,
"vm_ref"))
@mock.patch.object(vmops.VMOps, '_read_from_xenstore')
def test_pv_device_None_reported(self, fake_read_from_xenstore):
with mock.patch.object(self._session.VM, 'get_record') as fake_vm_rec:
fake_vm_rec.return_value = {'VIFs': 'fake-vif-object'}
with mock.patch.object(self._session, 'call_xenapi') as fake_call:
fake_call.return_value = {'device': '0'}
fake_read_from_xenstore.return_value = '"None"'
fake_instance = {"name": "fake_instance"}
self.assertFalse(self.vmops._pv_device_reported(fake_instance,
"vm_ref"))
@mock.patch.object(vmops.VMOps, '_write_to_xenstore')
def test_write_fake_pv_version(self, fake_write_to_xenstore):
fake_write_to_xenstore.return_value = 'fake_return'
fake_instance = {"name": "fake_instance"}
with mock.patch.object(self._session, 'product_version') as version:
version.return_value = ('6', '2', '0')
self.assertIsNone(self.vmops._write_fake_pv_version(fake_instance,
"vm_ref"))
class LiveMigrateHelperTestCase(VMOpsTestBase):
def test_connect_block_device_volumes_none(self):
self.assertEqual({}, self.vmops.connect_block_device_volumes(None))
@mock.patch.object(volumeops.VolumeOps, "connect_volume")
def test_connect_block_device_volumes_calls_connect(self, mock_connect):
with mock.patch.object(self.vmops._session,
"call_xenapi") as mock_session:
mock_connect.return_value = ("sr_uuid", None)
mock_session.return_value = "sr_ref"
bdm = {"connection_info": "c_info"}
bdi = {"block_device_mapping": [bdm]}
result = self.vmops.connect_block_device_volumes(bdi)
self.assertEqual({'sr_uuid': 'sr_ref'}, result)
mock_connect.assert_called_once_with("c_info")
mock_session.assert_called_once_with("SR.get_by_uuid",
"sr_uuid")
class RollbackLiveMigrateDestinationTestCase(VMOpsTestBase):
@mock.patch.object(volume_utils, 'find_sr_by_uuid', return_value='sr_ref')
@mock.patch.object(volume_utils, 'forget_sr')
def test_rollback_dest_calls_sr_forget(self, forget_sr, sr_ref):
block_device_info = {'block_device_mapping': [{'connection_info':
{'data': {'volume_id': 'fake-uuid',
'target_iqn': 'fake-iqn',
'target_portal': 'fake-portal'}}}]}
self.vmops.rollback_live_migration_at_destination('instance',
block_device_info)
forget_sr.assert_called_once_with(self.vmops._session, 'sr_ref')
@mock.patch.object(volume_utils, 'forget_sr')
@mock.patch.object(volume_utils, 'find_sr_by_uuid',
side_effect=test.TestingException)
def test_rollback_dest_handles_exception(self, find_sr_ref, forget_sr):
block_device_info = {'block_device_mapping': [{'connection_info':
{'data': {'volume_id': 'fake-uuid',
'target_iqn': 'fake-iqn',
'target_portal': 'fake-portal'}}}]}
self.vmops.rollback_live_migration_at_destination('instance',
block_device_info)
self.assertFalse(forget_sr.called)
@mock.patch.object(vmops.VMOps, '_resize_ensure_vm_is_shutdown')
@mock.patch.object(vmops.VMOps, '_apply_orig_vm_name_label')
@mock.patch.object(vmops.VMOps, '_update_instance_progress')
@mock.patch.object(vm_utils, 'get_vdi_for_vm_safely')
@mock.patch.object(vm_utils, 'resize_disk')
@mock.patch.object(vm_utils, 'migrate_vhd')
@mock.patch.object(vm_utils, 'destroy_vdi')
class MigrateDiskResizingDownTestCase(VMOpsTestBase):
def test_migrate_disk_resizing_down_works_no_ephemeral(
self,
mock_destroy_vdi,
mock_migrate_vhd,
mock_resize_disk,
mock_get_vdi_for_vm_safely,
mock_update_instance_progress,
mock_apply_orig_vm_name_label,
mock_resize_ensure_vm_is_shutdown):
context = "ctx"
instance = {"name": "fake", "uuid": "uuid"}
dest = "dest"
vm_ref = "vm_ref"
sr_path = "sr_path"
instance_type = dict(root_gb=1)
old_vdi_ref = "old_ref"
new_vdi_ref = "new_ref"
new_vdi_uuid = "new_uuid"
mock_get_vdi_for_vm_safely.return_value = (old_vdi_ref, None)
mock_resize_disk.return_value = (new_vdi_ref, new_vdi_uuid)
self.vmops._migrate_disk_resizing_down(context, instance, dest,
instance_type, vm_ref, sr_path)
mock_get_vdi_for_vm_safely.assert_called_once_with(
self.vmops._session,
vm_ref)
mock_resize_ensure_vm_is_shutdown.assert_called_once_with(
instance, vm_ref)
mock_apply_orig_vm_name_label.assert_called_once_with(
instance, vm_ref)
mock_resize_disk.assert_called_once_with(
self.vmops._session,
instance,
old_vdi_ref,
instance_type)
mock_migrate_vhd.assert_called_once_with(
self.vmops._session,
instance,
new_vdi_uuid,
dest,
sr_path, 0)
mock_destroy_vdi.assert_called_once_with(
self.vmops._session,
new_vdi_ref)
prog_expected = [
mock.call(context, instance, 1, 5),
mock.call(context, instance, 2, 5),
mock.call(context, instance, 3, 5),
mock.call(context, instance, 4, 5)
# 5/5: step to be executed by finish migration.
]
self.assertEqual(prog_expected,
mock_update_instance_progress.call_args_list)
class GetVdisForInstanceTestCase(VMOpsTestBase):
"""Tests get_vdis_for_instance utility method."""
def setUp(self):
super(GetVdisForInstanceTestCase, self).setUp()
self.context = context.get_admin_context()
self.context.auth_token = 'auth_token'
self.session = mock.Mock()
self.vmops._session = self.session
self.instance = fake_instance.fake_instance_obj(self.context)
self.name_label = 'name'
self.image = 'fake_image_id'
@mock.patch.object(volumeops.VolumeOps, "connect_volume",
return_value=("sr", "vdi_uuid"))
def test_vdis_for_instance_bdi_password_scrubbed(self, get_uuid_mock):
# setup fake data
data = {'name_label': self.name_label,
'sr_uuid': 'fake',
'auth_password': 'scrubme'}
bdm = [{'mount_device': '/dev/vda',
'connection_info': {'data': data}}]
bdi = {'root_device_name': 'vda',
'block_device_mapping': bdm}
        # Tests that the connection_info parameters are sanitized for
        # passwords when logged.
def fake_debug(*args, **kwargs):
if 'auth_password' in args[0]:
self.assertNotIn('scrubme', args[0])
fake_debug.matched = True
fake_debug.matched = False
with mock.patch.object(vmops.LOG, 'debug',
side_effect=fake_debug) as debug_mock:
vdis = self.vmops._get_vdis_for_instance(self.context,
self.instance, self.name_label, self.image,
image_type=4, block_device_info=bdi)
self.assertEqual(1, len(vdis))
get_uuid_mock.assert_called_once_with({"data": data})
# we don't care what the log message is, we just want to make sure
# our stub method is called which asserts the password is scrubbed
self.assertTrue(debug_mock.called)
self.assertTrue(fake_debug.matched)
| dims/nova | nova/tests/unit/virt/xenapi/test_vmops.py | Python | apache-2.0 | 66,102 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Serialization response factories.
Responsible for creating an HTTP response given the output of a serializer.
"""
from __future__ import absolute_import, print_function
from flask import current_app
def record_responsify(serializer, mimetype):
"""Create a Records-REST response serializer.
:param serializer: Serializer instance.
:param mimetype: MIME type of response.
:returns: Function that generates a record HTTP response.
"""
def view(pid, record, code=200, headers=None, links_factory=None):
response = current_app.response_class(
serializer.serialize(pid, record, links_factory=links_factory),
mimetype=mimetype)
response.status_code = code
response.set_etag(str(record.revision_id))
response.last_modified = record.updated
if headers is not None:
response.headers.extend(headers)
if links_factory is not None:
add_link_header(response, links_factory(pid))
return response
return view
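# Usage sketch (illustrative, not part of this module): any object exposing
# ``serialize(pid, record, links_factory=...)`` can be plugged in, e.g. a
# hypothetical ``MySerializer``:
#
#     record_to_json = record_responsify(MySerializer(), 'application/json')
#     response = record_to_json(pid, record, code=200)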
def search_responsify(serializer, mimetype):
"""Create a Records-REST search result response serializer.
:param serializer: Serializer instance.
:param mimetype: MIME type of response.
    :returns: Function that generates a search result HTTP response.
"""
def view(pid_fetcher, search_result, code=200, headers=None, links=None,
item_links_factory=None):
response = current_app.response_class(
serializer.serialize_search(pid_fetcher, search_result,
links=links,
item_links_factory=item_links_factory),
mimetype=mimetype)
response.status_code = code
if headers is not None:
response.headers.extend(headers)
if links is not None:
add_link_header(response, links)
return response
return view
def add_link_header(response, links):
"""Add a Link HTTP header to a REST response.
:param response: REST response instance
:param links: Dictionary of links
"""
if links is not None:
response.headers.extend({
'Link': ', '.join([
'<{0}>; rel="{1}"'.format(l, r) for r, l in links.items()])
})
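# Example (illustrative): for ``links={'self': 'https://example.org/records/1'}``
# the call ``add_link_header(response, links)`` adds the header:
#
#     Link: <https://example.org/records/1>; rel="self"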
| tiborsimko/invenio-records-rest | invenio_records_rest/serializers/response.py | Python | mit | 2,527 |
import os
def sect_from_bracket_file(infile, get_section = 'atoms', skip = ';'):
intt = open(infile,'r')
table = []
section = 'GLOBAL'
for ln in intt.readlines():
ln = ln.replace('\t',' ')
sp = ln.split()
if sp == [] or skip in sp[0] or skip in sp[0][0]:
continue
        if '[' in ln and ']' in ln:
section = ln.replace('[',' ').replace(']',' ').strip()
elif section != get_section:
continue
else:
table.append(ln.split())
intt.close()
return table
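# Example (illustrative): for a GROMACS-style bracket file containing
#
#   ; atom table
#   [ atoms ]
#   1  CH3
#   2  CH2
#
# sect_from_bracket_file(path, get_section='atoms') returns
# [['1', 'CH3'], ['2', 'CH2']]. Full-line comments (first token starting with
# the skip character) are dropped; inline comments are not stripped.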
def dict_from_bracket_file(infile, skip = '#'):
intt = open(infile,'r')
options = {}
section = 'GLOBAL'
for ln in intt.readlines():
ln = ln.replace('\t',' ')
sp = ln.split()
if sp == [] or skip in sp[0] or skip in sp[0][0]:
continue
        if '[' in ln and ']' in ln:
section = ln.replace('[',' ').replace(']',' ').strip()
options[section] = {}
else:
ln = ln.split('=')
key = ln[0].strip()
tmpstr = ln[1].strip()
if tmpstr[0:1] == '"' and tmpstr[-1:] == '"':
# One single string
arg = [tmpstr.replace('"','')]
else:
tmp = ln[1].strip().split()
arg = []
for t in tmp:
if skip in t:
break
else:
try:
arg.append(float(t))
except ValueError:
arg.append(t)
options[section][key] = arg
intt.close()
return options
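# Example (illustrative): for an options file containing
#
#   [ run ]
#   steps = 1000
#   title = "test run"
#
# dict_from_bracket_file(path) returns
# {'run': {'steps': [1000.0], 'title': ['test run']}}: numeric tokens are
# coerced to float, and a double-quoted value is kept as one string.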
def struct_from_topfile(infile):
system = sect_from_bracket_file(infile, get_section = 'molecules')
name = sect_from_bracket_file(infile, get_section = 'system')
include = sect_from_bracket_file(infile, get_section = 'GLOBAL')
mol_nrAtoms = {}
for item in include:
name = item[1].replace('"','')
try:
molecule = sect_from_bracket_file(name, 'moleculetype')[0][0]
except IndexError:
continue
nr_atoms = len(sect_from_bracket_file(name))
mol_nrAtoms[molecule] = nr_atoms
mol_nrMol_nrAtoms = []
for item in system:
mol_nrMol_nrAtoms.append( [item[0], int(item[1]), mol_nrAtoms[item[0]]] )
return mol_nrMol_nrAtoms
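# Example (illustrative): for a topology whose [ molecules ] section lists
# "SOL 216" and whose included .itp file defines a 3-atom molecule named SOL,
# struct_from_topfile(top) returns [['SOL', 216, 3]], i.e. triples of
# (molecule name, number of molecules, atoms per molecule).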
| 12AngryMen/votca-scripts | lib/Carlstuff/evaporation2/__proptions__.py | Python | apache-2.0 | 1,993 |
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
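# Each generated block below registers a named marker set (creating it only
# once) and places a single spherical marker at an (x, y, z) coordinate with
# an (r, g, b) colour and a radius.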
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((5738.45, 5346.13, 12557.2), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((6243.73, 5697.82, 10841.6), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((4946.81, 6288.5, 9473.97), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((4641.36, 7362.37, 11524.5), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((2979.61, 7877.64, 11249.2), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((2120.55, 6920.28, 9161.22), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((1446.49, 5719.89, 8130.03), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((1199.22, 6105.05, 8917.83), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((1583.59, 4386.53, 6852.09), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((947.909, 2827.65, 7049.52), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((1829.72, 2291.94, 5466.01), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((3098.59, 2313.06, 5995.55), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((4653.15, 2028.23, 6107.58), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((4531.17, 2132.74, 7659.86), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((6192.15, 845.314, 8529.91), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((9244.94, 1330.8, 8604.21), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((9371.7, 3005.1, 7638.81), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((9108.93, 2241.25, 6435.37), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((7472.47, 1957.45, 6031.21), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((6637.84, 776.133, 5596.31), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((4939.19, 2504.52, 5331.35), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((6780.86, 1600.28, 5328.87), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((6954.15, 2041.49, 4778.35), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((8194.43, 2411.02, 4710.38), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((8806.62, 2696.87, 5920.21), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((10031.3, 2278.14, 6813.27), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8586.62, 2181.36, 6254.85), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((6491.47, 2736.6, 6624.23), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((6334.04, 2551.4, 5141.87), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5140.06, 2449.82, 4613.54), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5716.69, 3424.16, 4563.22), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((4296.59, 2669.12, 5324.03), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((5322.29, 1826.02, 4113.06), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((6556.11, 2673.63, 4159.41), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((7472.31, 2097.35, 4987.06), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((8410.79, 1198.77, 5535.71), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((6250.83, 2317.87, 5292.78), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((8009.46, 1990.11, 4771.77), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((7714.02, 2687.49, 5375.6), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((7552.2, 1742.03, 4125.86), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((6138.05, 2545.51, 3604.41), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((4571.33, 1942.82, 2736.86), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((6131.85, 1415.68, 690.432), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((4363.09, 1888.45, 1342.38), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((5654.65, 2247.79, 2305.85), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((4456.68, 3495.35, 3202.32), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((5044.93, 4941.82, 1961.52), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((5048.32, 3904.5, 188.623), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((4422.83, 5292.5, 1253.11), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((3646.84, 5719.03, 2879.57), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((3508.31, 4916.93, 1956.47), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((3102.18, 4784.25, 3570.67), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((3171.27, 4696.73, 5333.77), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((1628.79, 4833.57, 5494.08), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((1458.13, 5098.3, 4836.89), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((3304.3, 5600.79, 4002.12), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((3818.45, 7465.8, 5022.77), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((5482.72, 9327.65, 4691.01), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((5915.2, 9718.69, 4565.2), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((5724.91, 9942.65, 5327.16), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((5199.86, 9341.59, 4407.13), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((4750.26, 10116.1, 4221.06), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((4591.98, 8321.89, 4829.17), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((6402.1, 8554.8, 5558.92), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((8292.29, 8868.61, 5834.6), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((7915.25, 7583.47, 4618.25), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((8794.54, 8786.69, 3878.66), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((6678.97, 8217.06, 4674.1), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((7704.54, 10218.1, 5313.62), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((8994.31, 9466.5, 4786.82), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((8119.93, 9055.84, 4180.02), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/mtx1_models/SHH_WT_models7948.py | Python | gpl-3.0 | 17,578 |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class Action(AbstractModel):
"""规则引擎转发动作
"""
def __init__(self):
r"""
:param Topic: 转发至topic
注意:此字段可能返回 null,表示取不到有效值。
:type Topic: :class:`tencentcloud.iot.v20180123.models.TopicAction`
:param Service: 转发至第三发
注意:此字段可能返回 null,表示取不到有效值。
:type Service: :class:`tencentcloud.iot.v20180123.models.ServiceAction`
:param Ckafka: 转发至第三发Ckafka
注意:此字段可能返回 null,表示取不到有效值。
:type Ckafka: :class:`tencentcloud.iot.v20180123.models.CkafkaAction`
"""
self.Topic = None
self.Service = None
self.Ckafka = None
def _deserialize(self, params):
if params.get("Topic") is not None:
self.Topic = TopicAction()
self.Topic._deserialize(params.get("Topic"))
if params.get("Service") is not None:
self.Service = ServiceAction()
self.Service._deserialize(params.get("Service"))
if params.get("Ckafka") is not None:
self.Ckafka = CkafkaAction()
self.Ckafka._deserialize(params.get("Ckafka"))
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
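# Usage sketch (illustrative, not part of the generated SDK; the field values
# are hypothetical):
#
#     action = Action()
#     action._deserialize({"Ckafka": {"InstanceId": "ckafka-xxxxxxxx",
#                                     "TopicName": "iot-data",
#                                     "Region": "ap-guangzhou"}})
#     assert action.Ckafka.TopicName == "iot-data"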
class ActivateRuleRequest(AbstractModel):
"""ActivateRule请求参数结构体
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
"""
self.RuleId = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class ActivateRuleResponse(AbstractModel):
"""ActivateRule返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AddDeviceRequest(AbstractModel):
"""AddDevice请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称,唯一标识某产品下的一个设备
:type DeviceName: str
"""
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AddDeviceResponse(AbstractModel):
"""AddDevice返回参数结构体
"""
def __init__(self):
r"""
:param Device: 设备信息
:type Device: :class:`tencentcloud.iot.v20180123.models.Device`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Device = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Device") is not None:
self.Device = Device()
self.Device._deserialize(params.get("Device"))
self.RequestId = params.get("RequestId")
class AddProductRequest(AbstractModel):
"""AddProduct请求参数结构体
"""
def __init__(self):
r"""
:param Name: 产品名称,同一区域产品名称需唯一,支持中文、英文字母、中划线和下划线,长度不超过31个字符,中文占两个字符
:type Name: str
:param Description: 产品描述
:type Description: str
:param DataTemplate: 数据模版
:type DataTemplate: list of DataTemplate
:param DataProtocol: 产品版本(native表示基础版,template表示高级版,默认值为template)
:type DataProtocol: str
:param AuthType: 设备认证方式(1:动态令牌,2:签名直连鉴权)
:type AuthType: int
:param CommProtocol: 通信方式(other/wifi/cellular/nb-iot)
:type CommProtocol: str
:param DeviceType: 产品的设备类型(device: 直连设备;sub_device:子设备;gateway:网关设备)
:type DeviceType: str
"""
self.Name = None
self.Description = None
self.DataTemplate = None
self.DataProtocol = None
self.AuthType = None
self.CommProtocol = None
self.DeviceType = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Description = params.get("Description")
if params.get("DataTemplate") is not None:
self.DataTemplate = []
for item in params.get("DataTemplate"):
obj = DataTemplate()
obj._deserialize(item)
self.DataTemplate.append(obj)
self.DataProtocol = params.get("DataProtocol")
self.AuthType = params.get("AuthType")
self.CommProtocol = params.get("CommProtocol")
self.DeviceType = params.get("DeviceType")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
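# Usage sketch (illustrative; field values are hypothetical):
#
#     req = AddProductRequest()
#     req._deserialize({"Name": "smart-lamp", "Description": "demo product",
#                       "AuthType": 2, "CommProtocol": "wifi",
#                       "DeviceType": "device"})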
class AddProductResponse(AbstractModel):
"""AddProduct返回参数结构体
"""
def __init__(self):
r"""
:param Product: 产品信息
:type Product: :class:`tencentcloud.iot.v20180123.models.Product`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Product = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Product") is not None:
self.Product = Product()
self.Product._deserialize(params.get("Product"))
self.RequestId = params.get("RequestId")
class AddRuleRequest(AbstractModel):
"""AddRule请求参数结构体
"""
def __init__(self):
r"""
:param Name: 名称
:type Name: str
:param Description: 描述
:type Description: str
:param Query: 查询
:type Query: :class:`tencentcloud.iot.v20180123.models.RuleQuery`
:param Actions: 转发动作列表
:type Actions: list of Action
:param DataType: 数据类型(0:文本,1:二进制)
:type DataType: int
"""
self.Name = None
self.Description = None
self.Query = None
self.Actions = None
self.DataType = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Description = params.get("Description")
if params.get("Query") is not None:
self.Query = RuleQuery()
self.Query._deserialize(params.get("Query"))
if params.get("Actions") is not None:
self.Actions = []
for item in params.get("Actions"):
obj = Action()
obj._deserialize(item)
self.Actions.append(obj)
self.DataType = params.get("DataType")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AddRuleResponse(AbstractModel):
"""AddRule返回参数结构体
"""
def __init__(self):
r"""
:param Rule: 规则
:type Rule: :class:`tencentcloud.iot.v20180123.models.Rule`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Rule = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Rule") is not None:
self.Rule = Rule()
self.Rule._deserialize(params.get("Rule"))
self.RequestId = params.get("RequestId")
class AddTopicRequest(AbstractModel):
"""AddTopic请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param TopicName: Topic名称
:type TopicName: str
"""
self.ProductId = None
self.TopicName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.TopicName = params.get("TopicName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AddTopicResponse(AbstractModel):
"""AddTopic返回参数结构体
"""
def __init__(self):
r"""
:param Topic: Topic信息
:type Topic: :class:`tencentcloud.iot.v20180123.models.Topic`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Topic = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Topic") is not None:
self.Topic = Topic()
self.Topic._deserialize(params.get("Topic"))
self.RequestId = params.get("RequestId")
class AppAddUserRequest(AbstractModel):
"""AppAddUser请求参数结构体
"""
def __init__(self):
r"""
:param UserName: 用户名
:type UserName: str
:param Password: 密码
:type Password: str
"""
self.UserName = None
self.Password = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Password = params.get("Password")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppAddUserResponse(AbstractModel):
"""AppAddUser返回参数结构体
"""
def __init__(self):
r"""
:param AppUser: 应用用户
:type AppUser: :class:`tencentcloud.iot.v20180123.models.AppUser`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppUser = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppUser") is not None:
self.AppUser = AppUser()
self.AppUser._deserialize(params.get("AppUser"))
self.RequestId = params.get("RequestId")
class AppDeleteDeviceRequest(AbstractModel):
"""AppDeleteDevice请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.AccessToken = None
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppDeleteDeviceResponse(AbstractModel):
"""AppDeleteDevice返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AppDevice(AbstractModel):
"""绑定设备
"""
def __init__(self):
r"""
:param DeviceId: 设备Id
:type DeviceId: str
:param ProductId: 所属产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param AliasName: 别名
:type AliasName: str
:param Region: 地区
:type Region: str
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 更新时间
:type UpdateTime: str
"""
self.DeviceId = None
self.ProductId = None
self.DeviceName = None
self.AliasName = None
self.Region = None
self.CreateTime = None
self.UpdateTime = None
def _deserialize(self, params):
self.DeviceId = params.get("DeviceId")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.AliasName = params.get("AliasName")
self.Region = params.get("Region")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppDeviceDetail(AbstractModel):
"""绑定设备详情
"""
def __init__(self):
r"""
:param DeviceId: 设备Id
:type DeviceId: str
:param ProductId: 所属产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param AliasName: 别名
:type AliasName: str
:param Region: 地区
:type Region: str
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 更新时间
:type UpdateTime: str
:param DeviceInfo: 设备信息(json)
:type DeviceInfo: str
:param DataTemplate: 数据模板
:type DataTemplate: list of DataTemplate
"""
self.DeviceId = None
self.ProductId = None
self.DeviceName = None
self.AliasName = None
self.Region = None
self.CreateTime = None
self.UpdateTime = None
self.DeviceInfo = None
self.DataTemplate = None
def _deserialize(self, params):
self.DeviceId = params.get("DeviceId")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.AliasName = params.get("AliasName")
self.Region = params.get("Region")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
self.DeviceInfo = params.get("DeviceInfo")
if params.get("DataTemplate") is not None:
self.DataTemplate = []
for item in params.get("DataTemplate"):
obj = DataTemplate()
obj._deserialize(item)
self.DataTemplate.append(obj)
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetDeviceDataRequest(AbstractModel):
"""AppGetDeviceData请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.AccessToken = None
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetDeviceDataResponse(AbstractModel):
"""AppGetDeviceData返回参数结构体
"""
def __init__(self):
r"""
:param DeviceData: 设备数据。
:type DeviceData: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceData = None
self.RequestId = None
def _deserialize(self, params):
self.DeviceData = params.get("DeviceData")
self.RequestId = params.get("RequestId")
class AppGetDeviceRequest(AbstractModel):
"""AppGetDevice请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.AccessToken = None
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetDeviceResponse(AbstractModel):
"""AppGetDevice返回参数结构体
"""
def __init__(self):
r"""
:param AppDevice: 绑定设备详情
:type AppDevice: :class:`tencentcloud.iot.v20180123.models.AppDeviceDetail`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppDevice = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppDevice") is not None:
self.AppDevice = AppDeviceDetail()
self.AppDevice._deserialize(params.get("AppDevice"))
self.RequestId = params.get("RequestId")
class AppGetDeviceStatusesRequest(AbstractModel):
"""AppGetDeviceStatuses请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param DeviceIds: 设备Id列表(单次限制1000个设备)
:type DeviceIds: list of str
"""
self.AccessToken = None
self.DeviceIds = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.DeviceIds = params.get("DeviceIds")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetDeviceStatusesResponse(AbstractModel):
"""AppGetDeviceStatuses返回参数结构体
"""
def __init__(self):
r"""
:param DeviceStatuses: 设备状态
:type DeviceStatuses: list of DeviceStatus
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceStatuses = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DeviceStatuses") is not None:
self.DeviceStatuses = []
for item in params.get("DeviceStatuses"):
obj = DeviceStatus()
obj._deserialize(item)
self.DeviceStatuses.append(obj)
self.RequestId = params.get("RequestId")
class AppGetDevicesRequest(AbstractModel):
"""AppGetDevices请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
"""
self.AccessToken = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetDevicesResponse(AbstractModel):
"""AppGetDevices返回参数结构体
"""
def __init__(self):
r"""
:param Devices: 绑定设备列表
:type Devices: list of AppDevice
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Devices = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Devices") is not None:
self.Devices = []
for item in params.get("Devices"):
obj = AppDevice()
obj._deserialize(item)
self.Devices.append(obj)
self.RequestId = params.get("RequestId")
class AppGetTokenRequest(AbstractModel):
"""AppGetToken请求参数结构体
"""
def __init__(self):
r"""
:param UserName: 用户名
:type UserName: str
:param Password: 密码
:type Password: str
:param Expire: TTL
:type Expire: int
"""
self.UserName = None
self.Password = None
self.Expire = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Password = params.get("Password")
self.Expire = params.get("Expire")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetTokenResponse(AbstractModel):
"""AppGetToken返回参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AccessToken = None
self.RequestId = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.RequestId = params.get("RequestId")
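# Flow note (illustrative): the App* requests require an access token from
# AppGetToken, e.g.
#
#     req = AppGetTokenRequest()
#     req._deserialize({"UserName": "alice", "Password": "secret"})
#
# The AccessToken returned in AppGetTokenResponse is then passed as the
# AccessToken field of subsequent App* requests (AppGetUser, AppGetDevices,
# and so on).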
class AppGetUserRequest(AbstractModel):
"""AppGetUser请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
"""
self.AccessToken = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppGetUserResponse(AbstractModel):
"""AppGetUser返回参数结构体
"""
def __init__(self):
r"""
:param AppUser: 用户信息
:type AppUser: :class:`tencentcloud.iot.v20180123.models.AppUser`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppUser = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppUser") is not None:
self.AppUser = AppUser()
self.AppUser._deserialize(params.get("AppUser"))
self.RequestId = params.get("RequestId")
class AppIssueDeviceControlRequest(AbstractModel):
"""AppIssueDeviceControl请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param ControlData: 控制数据(json)
:type ControlData: str
:param Metadata: 是否发送metadata字段
:type Metadata: bool
"""
self.AccessToken = None
self.ProductId = None
self.DeviceName = None
self.ControlData = None
self.Metadata = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.ControlData = params.get("ControlData")
self.Metadata = params.get("Metadata")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppIssueDeviceControlResponse(AbstractModel):
"""AppIssueDeviceControl返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AppResetPasswordRequest(AbstractModel):
"""AppResetPassword请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param OldPassword: 旧密码
:type OldPassword: str
:param NewPassword: 新密码
:type NewPassword: str
"""
self.AccessToken = None
self.OldPassword = None
self.NewPassword = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.OldPassword = params.get("OldPassword")
self.NewPassword = params.get("NewPassword")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppResetPasswordResponse(AbstractModel):
"""AppResetPassword返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class AppSecureAddDeviceRequest(AbstractModel):
"""AppSecureAddDevice请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param DeviceSignature: 设备签名
:type DeviceSignature: str
"""
self.AccessToken = None
self.DeviceSignature = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.DeviceSignature = params.get("DeviceSignature")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppSecureAddDeviceResponse(AbstractModel):
"""AppSecureAddDevice返回参数结构体
"""
def __init__(self):
r"""
:param AppDevice: 绑定设备信息
:type AppDevice: :class:`tencentcloud.iot.v20180123.models.AppDevice`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppDevice = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppDevice") is not None:
self.AppDevice = AppDevice()
self.AppDevice._deserialize(params.get("AppDevice"))
self.RequestId = params.get("RequestId")
class AppUpdateDeviceRequest(AbstractModel):
"""AppUpdateDevice请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param AliasName: 设备别名
:type AliasName: str
"""
self.AccessToken = None
self.ProductId = None
self.DeviceName = None
self.AliasName = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.AliasName = params.get("AliasName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppUpdateDeviceResponse(AbstractModel):
"""AppUpdateDevice返回参数结构体
"""
def __init__(self):
r"""
:param AppDevice: 设备信息
:type AppDevice: :class:`tencentcloud.iot.v20180123.models.AppDevice`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppDevice = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppDevice") is not None:
self.AppDevice = AppDevice()
self.AppDevice._deserialize(params.get("AppDevice"))
self.RequestId = params.get("RequestId")
class AppUpdateUserRequest(AbstractModel):
"""AppUpdateUser请求参数结构体
"""
def __init__(self):
r"""
:param AccessToken: 访问Token
:type AccessToken: str
:param NickName: 昵称
:type NickName: str
"""
self.AccessToken = None
self.NickName = None
def _deserialize(self, params):
self.AccessToken = params.get("AccessToken")
self.NickName = params.get("NickName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AppUpdateUserResponse(AbstractModel):
"""AppUpdateUser返回参数结构体
"""
def __init__(self):
r"""
:param AppUser: 应用用户
:type AppUser: :class:`tencentcloud.iot.v20180123.models.AppUser`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AppUser = None
self.RequestId = None
def _deserialize(self, params):
if params.get("AppUser") is not None:
self.AppUser = AppUser()
self.AppUser._deserialize(params.get("AppUser"))
self.RequestId = params.get("RequestId")
class AppUser(AbstractModel):
"""应用用户
"""
def __init__(self):
r"""
:param ApplicationId: 应用Id
:type ApplicationId: str
:param UserName: 用户名
:type UserName: str
:param NickName: 昵称
:type NickName: str
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 修改时间
:type UpdateTime: str
"""
self.ApplicationId = None
self.UserName = None
self.NickName = None
self.CreateTime = None
self.UpdateTime = None
def _deserialize(self, params):
self.ApplicationId = params.get("ApplicationId")
self.UserName = params.get("UserName")
self.NickName = params.get("NickName")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AssociateSubDeviceToGatewayProductRequest(AbstractModel):
"""AssociateSubDeviceToGatewayProduct请求参数结构体
"""
def __init__(self):
r"""
:param SubDeviceProductId: 子设备产品Id
:type SubDeviceProductId: str
:param GatewayProductId: 网关产品Id
:type GatewayProductId: str
"""
self.SubDeviceProductId = None
self.GatewayProductId = None
def _deserialize(self, params):
self.SubDeviceProductId = params.get("SubDeviceProductId")
self.GatewayProductId = params.get("GatewayProductId")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AssociateSubDeviceToGatewayProductResponse(AbstractModel):
"""AssociateSubDeviceToGatewayProduct返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class BoolData(AbstractModel):
"""布尔类型数据
"""
def __init__(self):
r"""
:param Name: 名称
:type Name: str
:param Desc: 描述
:type Desc: str
:param Mode: 读写模式
:type Mode: str
:param Range: 取值列表
:type Range: list of bool
"""
self.Name = None
self.Desc = None
self.Mode = None
self.Range = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Desc = params.get("Desc")
self.Mode = params.get("Mode")
self.Range = params.get("Range")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CkafkaAction(AbstractModel):
"""转发至Ckafka
"""
def __init__(self):
r"""
:param InstanceId: 实例Id
:type InstanceId: str
:param TopicName: topic名称
:type TopicName: str
:param Region: 地域
:type Region: str
"""
self.InstanceId = None
self.TopicName = None
self.Region = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.TopicName = params.get("TopicName")
self.Region = params.get("Region")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DataHistoryEntry(AbstractModel):
"""数据历史条目
"""
def __init__(self):
r"""
:param Id: 日志id
:type Id: str
:param Timestamp: 时间戳
:type Timestamp: int
:param DeviceName: 设备名称
:type DeviceName: str
:param Data: 数据
:type Data: str
"""
self.Id = None
self.Timestamp = None
self.DeviceName = None
self.Data = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Timestamp = params.get("Timestamp")
self.DeviceName = params.get("DeviceName")
self.Data = params.get("Data")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DataTemplate(AbstractModel):
"""数据模版
"""
def __init__(self):
r"""
:param Number: 数字类型
注意:此字段可能返回 null,表示取不到有效值。
:type Number: :class:`tencentcloud.iot.v20180123.models.NumberData`
:param String: 字符串类型
注意:此字段可能返回 null,表示取不到有效值。
:type String: :class:`tencentcloud.iot.v20180123.models.StringData`
:param Enum: 枚举类型
注意:此字段可能返回 null,表示取不到有效值。
:type Enum: :class:`tencentcloud.iot.v20180123.models.EnumData`
:param Bool: 布尔类型
注意:此字段可能返回 null,表示取不到有效值。
:type Bool: :class:`tencentcloud.iot.v20180123.models.BoolData`
"""
self.Number = None
self.String = None
self.Enum = None
self.Bool = None
def _deserialize(self, params):
if params.get("Number") is not None:
self.Number = NumberData()
self.Number._deserialize(params.get("Number"))
if params.get("String") is not None:
self.String = StringData()
self.String._deserialize(params.get("String"))
if params.get("Enum") is not None:
self.Enum = EnumData()
self.Enum._deserialize(params.get("Enum"))
if params.get("Bool") is not None:
self.Bool = BoolData()
self.Bool._deserialize(params.get("Bool"))
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
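# Usage sketch (illustrative; field values are hypothetical):
#
#     tpl = DataTemplate()
#     tpl._deserialize({"Bool": {"Name": "power", "Desc": "switch state",
#                                "Mode": "rw", "Range": [True, False]}})
#     assert tpl.Bool.Name == "power"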
class DeactivateRuleRequest(AbstractModel):
"""DeactivateRule请求参数结构体
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
"""
self.RuleId = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DeactivateRuleResponse(AbstractModel):
"""DeactivateRule返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DebugLogEntry(AbstractModel):
"""设备日志条目
"""
def __init__(self):
r"""
:param Id: 日志id
:type Id: str
:param Event: 行为(事件)
:type Event: str
:param LogType: shadow/action/mqtt, 分别表示:影子/规则引擎/上下线日志
:type LogType: str
:param Timestamp: 时间戳
:type Timestamp: int
:param Result: success/fail
:type Result: str
:param Data: 日志详细内容
:type Data: str
:param Topic: 数据来源topic
:type Topic: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.Id = None
self.Event = None
self.LogType = None
self.Timestamp = None
self.Result = None
self.Data = None
self.Topic = None
self.DeviceName = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Event = params.get("Event")
self.LogType = params.get("LogType")
self.Timestamp = params.get("Timestamp")
self.Result = params.get("Result")
self.Data = params.get("Data")
self.Topic = params.get("Topic")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DeleteDeviceRequest(AbstractModel):
"""DeleteDevice请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DeleteDeviceResponse(AbstractModel):
"""DeleteDevice返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteProductRequest(AbstractModel):
"""DeleteProduct请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
"""
self.ProductId = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DeleteProductResponse(AbstractModel):
"""DeleteProduct返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteRuleRequest(AbstractModel):
"""DeleteRule请求参数结构体
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
"""
self.RuleId = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteRuleResponse(AbstractModel):
"""DeleteRule返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteTopicRequest(AbstractModel):
"""DeleteTopic请求参数结构体
"""
def __init__(self):
r"""
:param TopicId: TopicId
:type TopicId: str
:param ProductId: 产品Id
:type ProductId: str
"""
self.TopicId = None
self.ProductId = None
def _deserialize(self, params):
self.TopicId = params.get("TopicId")
self.ProductId = params.get("ProductId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteTopicResponse(AbstractModel):
"""DeleteTopic返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class Device(AbstractModel):
"""设备
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param DeviceSecret: 设备密钥
:type DeviceSecret: str
:param UpdateTime: 更新时间
:type UpdateTime: str
:param CreateTime: 创建时间
:type CreateTime: str
:param DeviceInfo: 设备信息(json)
:type DeviceInfo: str
"""
self.ProductId = None
self.DeviceName = None
self.DeviceSecret = None
self.UpdateTime = None
self.CreateTime = None
self.DeviceInfo = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.DeviceSecret = params.get("DeviceSecret")
self.UpdateTime = params.get("UpdateTime")
self.CreateTime = params.get("CreateTime")
self.DeviceInfo = params.get("DeviceInfo")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeviceEntry(AbstractModel):
"""设备条目
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param DeviceSecret: 设备密钥
:type DeviceSecret: str
:param CreateTime: 创建时间
:type CreateTime: str
"""
self.ProductId = None
self.DeviceName = None
self.DeviceSecret = None
self.CreateTime = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.DeviceSecret = params.get("DeviceSecret")
self.CreateTime = params.get("CreateTime")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeviceLogEntry(AbstractModel):
"""设备日志条目
"""
def __init__(self):
r"""
:param Id: 日志id
:type Id: str
:param Msg: 日志内容
:type Msg: str
:param Code: 状态码
:type Code: str
:param Timestamp: 时间戳
:type Timestamp: int
:param DeviceName: 设备名称
:type DeviceName: str
:param Method: 设备动作
:type Method: str
"""
self.Id = None
self.Msg = None
self.Code = None
self.Timestamp = None
self.DeviceName = None
self.Method = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Msg = params.get("Msg")
self.Code = params.get("Code")
self.Timestamp = params.get("Timestamp")
self.DeviceName = params.get("DeviceName")
self.Method = params.get("Method")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeviceSignature(AbstractModel):
"""设备签名
"""
def __init__(self):
r"""
:param DeviceName: 设备名称
:type DeviceName: str
:param DeviceSignature: 设备签名
:type DeviceSignature: str
"""
self.DeviceName = None
self.DeviceSignature = None
def _deserialize(self, params):
self.DeviceName = params.get("DeviceName")
self.DeviceSignature = params.get("DeviceSignature")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeviceStatData(AbstractModel):
"""设备统计数据
"""
def __init__(self):
r"""
:param Datetime: 时间点
:type Datetime: str
:param DeviceOnline: 在线设备数
:type DeviceOnline: int
:param DeviceActive: 激活设备数
:type DeviceActive: int
:param DeviceTotal: 设备总数
:type DeviceTotal: int
"""
self.Datetime = None
self.DeviceOnline = None
self.DeviceActive = None
self.DeviceTotal = None
def _deserialize(self, params):
self.Datetime = params.get("Datetime")
self.DeviceOnline = params.get("DeviceOnline")
self.DeviceActive = params.get("DeviceActive")
self.DeviceTotal = params.get("DeviceTotal")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeviceStatus(AbstractModel):
"""设备状态
"""
def __init__(self):
r"""
:param DeviceName: 设备名称
:type DeviceName: str
:param Status: 设备状态(inactive, online, offline)
:type Status: str
:param FirstOnline: 首次上线时间
注意:此字段可能返回 null,表示取不到有效值。
:type FirstOnline: str
:param LastOnline: 最后上线时间
注意:此字段可能返回 null,表示取不到有效值。
:type LastOnline: str
:param OnlineTimes: 上线次数
:type OnlineTimes: int
"""
self.DeviceName = None
self.Status = None
self.FirstOnline = None
self.LastOnline = None
self.OnlineTimes = None
def _deserialize(self, params):
self.DeviceName = params.get("DeviceName")
self.Status = params.get("Status")
self.FirstOnline = params.get("FirstOnline")
self.LastOnline = params.get("LastOnline")
self.OnlineTimes = params.get("OnlineTimes")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class EnumData(AbstractModel):
"""枚举类型数据
"""
def __init__(self):
r"""
:param Name: 名称
:type Name: str
:param Desc: 描述
:type Desc: str
:param Mode: 读写模式
:type Mode: str
:param Range: 取值列表
:type Range: list of str
"""
self.Name = None
self.Desc = None
self.Mode = None
self.Range = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Desc = params.get("Desc")
self.Mode = params.get("Mode")
self.Range = params.get("Range")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
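# Hedged usage sketch (illustrative only -- this helper and its values are
# not part of the generated SDK): _deserialize() turns a raw JSON-decoded
# dict from the API into a typed model, warning about any unknown keys.
def _example_enum_data():
    params = {"Name": "color", "Desc": "LED color", "Mode": "rw",
              "Range": ["red", "green", "blue"]}
    data = EnumData()
    data._deserialize(params)
    return data.Range  # -> ["red", "green", "blue"]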
class GetDataHistoryRequest(AbstractModel):
"""GetDataHistory请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceNames: 设备名称列表,允许最多一次100台
:type DeviceNames: list of str
:param StartTime: 查询开始时间
:type StartTime: str
:param EndTime: 查询结束时间
:type EndTime: str
:param Size: 查询数据量
:type Size: int
:param Order: 时间排序(desc/asc)
:type Order: str
:param ScrollId: 查询游标
:type ScrollId: str
"""
self.ProductId = None
self.DeviceNames = None
self.StartTime = None
self.EndTime = None
self.Size = None
self.Order = None
self.ScrollId = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceNames = params.get("DeviceNames")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Size = params.get("Size")
self.Order = params.get("Order")
self.ScrollId = params.get("ScrollId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDataHistoryResponse(AbstractModel):
"""GetDataHistory返回参数结构体
"""
def __init__(self):
r"""
:param DataHistory: 数据历史
:type DataHistory: list of DataHistoryEntry
:param ScrollId: 查询游标
:type ScrollId: str
:param ScrollTimeout: 查询游标超时
:type ScrollTimeout: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DataHistory = None
self.ScrollId = None
self.ScrollTimeout = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DataHistory") is not None:
self.DataHistory = []
for item in params.get("DataHistory"):
obj = DataHistoryEntry()
obj._deserialize(item)
self.DataHistory.append(obj)
self.ScrollId = params.get("ScrollId")
self.ScrollTimeout = params.get("ScrollTimeout")
self.RequestId = params.get("RequestId")
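# Hedged pagination sketch (illustrative only, not part of the generated SDK):
# GetDataHistory pages with a scroll cursor. Feed each response's ScrollId
# back into the next request until a page comes back empty. The client's
# GetDataHistory(request) call follows the usual tencentcloud client pattern,
# but the exact call site here is an assumption.
def _example_scroll_data_history(client, request):
    entries = []
    while True:
        resp = client.GetDataHistory(request)
        if not resp.DataHistory:
            break
        entries.extend(resp.DataHistory)
        request.ScrollId = resp.ScrollId  # reuse the cursor for the next page
    return entries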
class GetDebugLogRequest(AbstractModel):
"""GetDebugLog请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceNames: 设备名称列表,最大支持100台
:type DeviceNames: list of str
:param StartTime: 查询开始时间
:type StartTime: str
:param EndTime: 查询结束时间
:type EndTime: str
:param Size: 查询数据量
:type Size: int
:param Order: 时间排序(desc/asc)
:type Order: str
:param ScrollId: 查询游标
:type ScrollId: str
:param Type: 日志类型(shadow/action/mqtt)
:type Type: str
"""
self.ProductId = None
self.DeviceNames = None
self.StartTime = None
self.EndTime = None
self.Size = None
self.Order = None
self.ScrollId = None
self.Type = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceNames = params.get("DeviceNames")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Size = params.get("Size")
self.Order = params.get("Order")
self.ScrollId = params.get("ScrollId")
self.Type = params.get("Type")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDebugLogResponse(AbstractModel):
"""GetDebugLog返回参数结构体
"""
def __init__(self):
r"""
:param DebugLog: 调试日志
:type DebugLog: list of DebugLogEntry
:param ScrollId: 查询游标
:type ScrollId: str
:param ScrollTimeout: 游标超时
:type ScrollTimeout: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DebugLog = None
self.ScrollId = None
self.ScrollTimeout = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DebugLog") is not None:
self.DebugLog = []
for item in params.get("DebugLog"):
obj = DebugLogEntry()
obj._deserialize(item)
self.DebugLog.append(obj)
self.ScrollId = params.get("ScrollId")
self.ScrollTimeout = params.get("ScrollTimeout")
self.RequestId = params.get("RequestId")
class GetDeviceDataRequest(AbstractModel):
"""GetDeviceData请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceDataResponse(AbstractModel):
"""GetDeviceData返回参数结构体
"""
def __init__(self):
r"""
:param DeviceData: 设备数据
:type DeviceData: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceData = None
self.RequestId = None
def _deserialize(self, params):
self.DeviceData = params.get("DeviceData")
self.RequestId = params.get("RequestId")
class GetDeviceLogRequest(AbstractModel):
"""GetDeviceLog请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceNames: 设备名称列表,最大支持100台
:type DeviceNames: list of str
:param StartTime: 查询开始时间
:type StartTime: str
:param EndTime: 查询结束时间
:type EndTime: str
:param Size: 查询数据量
:type Size: int
:param Order: 时间排序(desc/asc)
:type Order: str
:param ScrollId: 查询游标
:type ScrollId: str
:param Type: 日志类型(comm/status)
:type Type: str
"""
self.ProductId = None
self.DeviceNames = None
self.StartTime = None
self.EndTime = None
self.Size = None
self.Order = None
self.ScrollId = None
self.Type = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceNames = params.get("DeviceNames")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Size = params.get("Size")
self.Order = params.get("Order")
self.ScrollId = params.get("ScrollId")
self.Type = params.get("Type")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceLogResponse(AbstractModel):
"""GetDeviceLog返回参数结构体
"""
def __init__(self):
r"""
:param DeviceLog: 设备日志
:type DeviceLog: list of DeviceLogEntry
:param ScrollId: 查询游标
:type ScrollId: str
:param ScrollTimeout: 游标超时
:type ScrollTimeout: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceLog = None
self.ScrollId = None
self.ScrollTimeout = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DeviceLog") is not None:
self.DeviceLog = []
for item in params.get("DeviceLog"):
obj = DeviceLogEntry()
obj._deserialize(item)
self.DeviceLog.append(obj)
self.ScrollId = params.get("ScrollId")
self.ScrollTimeout = params.get("ScrollTimeout")
self.RequestId = params.get("RequestId")
class GetDeviceRequest(AbstractModel):
"""GetDevice请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceResponse(AbstractModel):
"""GetDevice返回参数结构体
"""
def __init__(self):
r"""
:param Device: 设备信息
:type Device: :class:`tencentcloud.iot.v20180123.models.Device`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Device = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Device") is not None:
self.Device = Device()
self.Device._deserialize(params.get("Device"))
self.RequestId = params.get("RequestId")
class GetDeviceSignaturesRequest(AbstractModel):
"""GetDeviceSignatures请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品ID
:type ProductId: str
:param DeviceNames: 设备名称列表(单次限制1000个设备)
:type DeviceNames: list of str
:param Expire: 过期时间
:type Expire: int
"""
self.ProductId = None
self.DeviceNames = None
self.Expire = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceNames = params.get("DeviceNames")
self.Expire = params.get("Expire")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceSignaturesResponse(AbstractModel):
"""GetDeviceSignatures返回参数结构体
"""
def __init__(self):
r"""
:param DeviceSignatures: 设备绑定签名列表
:type DeviceSignatures: list of DeviceSignature
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceSignatures = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DeviceSignatures") is not None:
self.DeviceSignatures = []
for item in params.get("DeviceSignatures"):
obj = DeviceSignature()
obj._deserialize(item)
self.DeviceSignatures.append(obj)
self.RequestId = params.get("RequestId")
class GetDeviceStatisticsRequest(AbstractModel):
"""GetDeviceStatistics请求参数结构体
"""
def __init__(self):
r"""
:param Products: 产品Id列表
:type Products: list of str
:param StartDate: 开始日期
:type StartDate: str
:param EndDate: 结束日期
:type EndDate: str
"""
self.Products = None
self.StartDate = None
self.EndDate = None
def _deserialize(self, params):
self.Products = params.get("Products")
self.StartDate = params.get("StartDate")
self.EndDate = params.get("EndDate")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceStatisticsResponse(AbstractModel):
"""GetDeviceStatistics返回参数结构体
"""
def __init__(self):
r"""
:param DeviceStatistics: 统计数据
:type DeviceStatistics: list of DeviceStatData
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceStatistics = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DeviceStatistics") is not None:
self.DeviceStatistics = []
for item in params.get("DeviceStatistics"):
obj = DeviceStatData()
obj._deserialize(item)
self.DeviceStatistics.append(obj)
self.RequestId = params.get("RequestId")
class GetDeviceStatusesRequest(AbstractModel):
"""GetDeviceStatuses请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品ID
:type ProductId: str
:param DeviceNames: 设备名称列表(单次限制1000个设备)
:type DeviceNames: list of str
"""
self.ProductId = None
self.DeviceNames = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceNames = params.get("DeviceNames")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDeviceStatusesResponse(AbstractModel):
"""GetDeviceStatuses返回参数结构体
"""
def __init__(self):
r"""
:param DeviceStatuses: 设备状态列表
:type DeviceStatuses: list of DeviceStatus
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DeviceStatuses = None
self.RequestId = None
def _deserialize(self, params):
if params.get("DeviceStatuses") is not None:
self.DeviceStatuses = []
for item in params.get("DeviceStatuses"):
obj = DeviceStatus()
obj._deserialize(item)
self.DeviceStatuses.append(obj)
self.RequestId = params.get("RequestId")
class GetDevicesRequest(AbstractModel):
"""GetDevices请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param Offset: 偏移
:type Offset: int
:param Length: 长度
:type Length: int
:param Keyword: 关键字查询
:type Keyword: str
"""
self.ProductId = None
self.Offset = None
self.Length = None
self.Keyword = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.Offset = params.get("Offset")
self.Length = params.get("Length")
self.Keyword = params.get("Keyword")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetDevicesResponse(AbstractModel):
"""GetDevices返回参数结构体
"""
def __init__(self):
r"""
:param Devices: 设备列表
:type Devices: list of DeviceEntry
:param Total: 设备总数
:type Total: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Devices = None
self.Total = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Devices") is not None:
self.Devices = []
for item in params.get("Devices"):
obj = DeviceEntry()
obj._deserialize(item)
self.Devices.append(obj)
self.Total = params.get("Total")
self.RequestId = params.get("RequestId")
class GetProductRequest(AbstractModel):
"""GetProduct请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
"""
self.ProductId = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetProductResponse(AbstractModel):
"""GetProduct返回参数结构体
"""
def __init__(self):
r"""
:param Product: 产品信息
:type Product: :class:`tencentcloud.iot.v20180123.models.Product`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Product = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Product") is not None:
self.Product = Product()
self.Product._deserialize(params.get("Product"))
self.RequestId = params.get("RequestId")
class GetProductsRequest(AbstractModel):
"""GetProducts请求参数结构体
"""
def __init__(self):
r"""
:param Offset: 偏移
:type Offset: int
:param Length: 长度
:type Length: int
"""
self.Offset = None
self.Length = None
def _deserialize(self, params):
self.Offset = params.get("Offset")
self.Length = params.get("Length")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetProductsResponse(AbstractModel):
"""GetProducts返回参数结构体
"""
def __init__(self):
r"""
:param Products: Product列表
:type Products: list of ProductEntry
:param Total: Product总数
:type Total: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Products = None
self.Total = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Products") is not None:
self.Products = []
for item in params.get("Products"):
obj = ProductEntry()
obj._deserialize(item)
self.Products.append(obj)
self.Total = params.get("Total")
self.RequestId = params.get("RequestId")
class GetRuleRequest(AbstractModel):
"""GetRule请求参数结构体
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
"""
self.RuleId = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetRuleResponse(AbstractModel):
"""GetRule返回参数结构体
"""
def __init__(self):
r"""
:param Rule: 规则
:type Rule: :class:`tencentcloud.iot.v20180123.models.Rule`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Rule = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Rule") is not None:
self.Rule = Rule()
self.Rule._deserialize(params.get("Rule"))
self.RequestId = params.get("RequestId")
class GetRulesRequest(AbstractModel):
"""GetRules请求参数结构体
"""
def __init__(self):
r"""
:param Offset: 偏移
:type Offset: int
:param Length: 长度
:type Length: int
"""
self.Offset = None
self.Length = None
def _deserialize(self, params):
self.Offset = params.get("Offset")
self.Length = params.get("Length")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetRulesResponse(AbstractModel):
"""GetRules返回参数结构体
"""
def __init__(self):
r"""
:param Rules: 规则列表
:type Rules: list of Rule
:param Total: 规则总数
:type Total: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Rules = None
self.Total = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Rules") is not None:
self.Rules = []
for item in params.get("Rules"):
obj = Rule()
obj._deserialize(item)
self.Rules.append(obj)
self.Total = params.get("Total")
self.RequestId = params.get("RequestId")
class GetTopicRequest(AbstractModel):
"""GetTopic请求参数结构体
"""
def __init__(self):
r"""
:param TopicId: TopicId
:type TopicId: str
:param ProductId: 产品Id
:type ProductId: str
"""
self.TopicId = None
self.ProductId = None
def _deserialize(self, params):
self.TopicId = params.get("TopicId")
self.ProductId = params.get("ProductId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetTopicResponse(AbstractModel):
"""GetTopic返回参数结构体
"""
def __init__(self):
r"""
:param Topic: Topic信息
:type Topic: :class:`tencentcloud.iot.v20180123.models.Topic`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Topic = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Topic") is not None:
self.Topic = Topic()
self.Topic._deserialize(params.get("Topic"))
self.RequestId = params.get("RequestId")
class GetTopicsRequest(AbstractModel):
"""GetTopics请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param Offset: 偏移
:type Offset: int
:param Length: 长度
:type Length: int
"""
self.ProductId = None
self.Offset = None
self.Length = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.Offset = params.get("Offset")
self.Length = params.get("Length")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GetTopicsResponse(AbstractModel):
"""GetTopics返回参数结构体
"""
def __init__(self):
r"""
:param Topics: Topic列表
:type Topics: list of Topic
:param Total: Topic总数
:type Total: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Topics = None
self.Total = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Topics") is not None:
self.Topics = []
for item in params.get("Topics"):
obj = Topic()
obj._deserialize(item)
self.Topics.append(obj)
self.Total = params.get("Total")
self.RequestId = params.get("RequestId")
class IssueDeviceControlRequest(AbstractModel):
"""IssueDeviceControl请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
:param ControlData: 控制数据(json)
:type ControlData: str
:param Metadata: 是否发送metadata字段
:type Metadata: bool
"""
self.ProductId = None
self.DeviceName = None
self.ControlData = None
self.Metadata = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
self.ControlData = params.get("ControlData")
self.Metadata = params.get("Metadata")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class IssueDeviceControlResponse(AbstractModel):
"""IssueDeviceControl返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class NumberData(AbstractModel):
"""数字类型数据
"""
def __init__(self):
r"""
:param Name: 名称
:type Name: str
:param Desc: 描述
:type Desc: str
:param Mode: 读写模式
:type Mode: str
:param Range: 取值范围
:type Range: list of float
"""
self.Name = None
self.Desc = None
self.Mode = None
self.Range = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Desc = params.get("Desc")
self.Mode = params.get("Mode")
self.Range = params.get("Range")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class Product(AbstractModel):
"""产品
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param ProductKey: 产品Key
:type ProductKey: str
:param AppId: AppId
:type AppId: int
:param Name: 产品名称
:type Name: str
:param Description: 产品描述
:type Description: str
:param Domain: 连接域名
:type Domain: str
:param Standard: 产品规格
:type Standard: int
:param AuthType: 鉴权类型(0:直连,1:Token)
:type AuthType: int
:param Deleted: 删除(0未删除)
:type Deleted: int
:param Message: 备注
:type Message: str
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 更新时间
:type UpdateTime: str
:param DataTemplate: 数据模版
:type DataTemplate: list of DataTemplate
:param DataProtocol: 数据协议(native/template)
:type DataProtocol: str
:param Username: 直连用户名
:type Username: str
:param Password: 直连密码
:type Password: str
:param CommProtocol: 通信方式
:type CommProtocol: str
:param Qps: qps
:type Qps: int
:param Region: 地域
:type Region: str
:param DeviceType: 产品的设备类型
:type DeviceType: str
:param AssociatedProducts: 关联的产品列表
:type AssociatedProducts: list of str
"""
self.ProductId = None
self.ProductKey = None
self.AppId = None
self.Name = None
self.Description = None
self.Domain = None
self.Standard = None
self.AuthType = None
self.Deleted = None
self.Message = None
self.CreateTime = None
self.UpdateTime = None
self.DataTemplate = None
self.DataProtocol = None
self.Username = None
self.Password = None
self.CommProtocol = None
self.Qps = None
self.Region = None
self.DeviceType = None
self.AssociatedProducts = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.ProductKey = params.get("ProductKey")
self.AppId = params.get("AppId")
self.Name = params.get("Name")
self.Description = params.get("Description")
self.Domain = params.get("Domain")
self.Standard = params.get("Standard")
self.AuthType = params.get("AuthType")
self.Deleted = params.get("Deleted")
self.Message = params.get("Message")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
if params.get("DataTemplate") is not None:
self.DataTemplate = []
for item in params.get("DataTemplate"):
obj = DataTemplate()
obj._deserialize(item)
self.DataTemplate.append(obj)
self.DataProtocol = params.get("DataProtocol")
self.Username = params.get("Username")
self.Password = params.get("Password")
self.CommProtocol = params.get("CommProtocol")
self.Qps = params.get("Qps")
self.Region = params.get("Region")
self.DeviceType = params.get("DeviceType")
self.AssociatedProducts = params.get("AssociatedProducts")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
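# Hedged sketch (illustrative only; the values are invented and this helper is
# not part of the generated SDK): nested structures such as DataTemplate are
# rebuilt recursively, so a dict-of-dicts round-trips into model instances.
def _example_product_deserialize():
    params = {"ProductId": "iot-abc123", "Name": "demo",
              "DataTemplate": [{"Number": {"Name": "temperature", "Mode": "r",
                                           "Range": [0.0, 100.0]}}]}
    product = Product()
    product._deserialize(params)
    return type(product.DataTemplate[0]).__name__  # -> "DataTemplate"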
class ProductEntry(AbstractModel):
"""产品条目
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param ProductKey: 产品Key
:type ProductKey: str
:param AppId: AppId
:type AppId: int
:param Name: 产品名称
:type Name: str
:param Description: 产品描述
:type Description: str
:param Domain: 连接域名
:type Domain: str
:param AuthType: 鉴权类型(0:直连,1:Token)
:type AuthType: int
:param DataProtocol: 数据协议(native/template)
:type DataProtocol: str
:param Deleted: 删除(0未删除)
:type Deleted: int
:param Message: 备注
:type Message: str
:param CreateTime: 创建时间
:type CreateTime: str
:param CommProtocol: 通信方式
:type CommProtocol: str
:param Region: 地域
:type Region: str
:param DeviceType: 设备类型
:type DeviceType: str
"""
self.ProductId = None
self.ProductKey = None
self.AppId = None
self.Name = None
self.Description = None
self.Domain = None
self.AuthType = None
self.DataProtocol = None
self.Deleted = None
self.Message = None
self.CreateTime = None
self.CommProtocol = None
self.Region = None
self.DeviceType = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.ProductKey = params.get("ProductKey")
self.AppId = params.get("AppId")
self.Name = params.get("Name")
self.Description = params.get("Description")
self.Domain = params.get("Domain")
self.AuthType = params.get("AuthType")
self.DataProtocol = params.get("DataProtocol")
self.Deleted = params.get("Deleted")
self.Message = params.get("Message")
self.CreateTime = params.get("CreateTime")
self.CommProtocol = params.get("CommProtocol")
self.Region = params.get("Region")
self.DeviceType = params.get("DeviceType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class PublishMsgRequest(AbstractModel):
"""PublishMsg请求参数结构体
"""
def __init__(self):
r"""
:param Topic: Topic
:type Topic: str
:param Message: 消息内容
:type Message: str
:param Qos: Qos(目前QoS支持0与1)
:type Qos: int
"""
self.Topic = None
self.Message = None
self.Qos = None
def _deserialize(self, params):
self.Topic = params.get("Topic")
self.Message = params.get("Message")
self.Qos = params.get("Qos")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class PublishMsgResponse(AbstractModel):
"""PublishMsg返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ResetDeviceRequest(AbstractModel):
"""ResetDevice请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param DeviceName: 设备名称
:type DeviceName: str
"""
self.ProductId = None
self.DeviceName = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.DeviceName = params.get("DeviceName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ResetDeviceResponse(AbstractModel):
"""ResetDevice返回参数结构体
"""
def __init__(self):
r"""
:param Device: 设备信息
:type Device: :class:`tencentcloud.iot.v20180123.models.Device`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Device = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Device") is not None:
self.Device = Device()
self.Device._deserialize(params.get("Device"))
self.RequestId = params.get("RequestId")
class Rule(AbstractModel):
"""规则
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
:param AppId: AppId
:type AppId: int
:param Name: 名称
:type Name: str
:param Description: 描述
:type Description: str
:param Query: 查询
:type Query: :class:`tencentcloud.iot.v20180123.models.RuleQuery`
:param Actions: 转发
:type Actions: list of Action
:param Active: 已启动
:type Active: int
:param Deleted: 已删除
:type Deleted: int
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 更新时间
:type UpdateTime: str
:param MsgOrder: 消息顺序
:type MsgOrder: int
:param DataType: 数据类型(0:文本,1:二进制)
:type DataType: int
"""
self.RuleId = None
self.AppId = None
self.Name = None
self.Description = None
self.Query = None
self.Actions = None
self.Active = None
self.Deleted = None
self.CreateTime = None
self.UpdateTime = None
self.MsgOrder = None
self.DataType = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
self.AppId = params.get("AppId")
self.Name = params.get("Name")
self.Description = params.get("Description")
if params.get("Query") is not None:
self.Query = RuleQuery()
self.Query._deserialize(params.get("Query"))
if params.get("Actions") is not None:
self.Actions = []
for item in params.get("Actions"):
obj = Action()
obj._deserialize(item)
self.Actions.append(obj)
self.Active = params.get("Active")
self.Deleted = params.get("Deleted")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
self.MsgOrder = params.get("MsgOrder")
self.DataType = params.get("DataType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
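# Hedged sketch (illustrative only, not part of the generated SDK): Rule
# carries a nested RuleQuery plus a list of Action objects, both rebuilt
# recursively by _deserialize.
def _example_rule_deserialize():
    params = {"RuleId": "rule-001", "Name": "uplink",
              "Query": {"Field": "*", "Condition": "", "Topic": "device/up"},
              "Actions": []}
    rule = Rule()
    rule._deserialize(params)
    return isinstance(rule.Query, RuleQuery)  # -> True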
class RuleQuery(AbstractModel):
"""查询
"""
def __init__(self):
r"""
:param Field: 字段
:type Field: str
:param Condition: 过滤规则
:type Condition: str
:param Topic: Topic
注意:此字段可能返回 null,表示取不到有效值。
:type Topic: str
:param ProductId: 产品Id
注意:此字段可能返回 null,表示取不到有效值。
:type ProductId: str
"""
self.Field = None
self.Condition = None
self.Topic = None
self.ProductId = None
def _deserialize(self, params):
self.Field = params.get("Field")
self.Condition = params.get("Condition")
self.Topic = params.get("Topic")
self.ProductId = params.get("ProductId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ServiceAction(AbstractModel):
"""转发到第三方http(s)服务
"""
def __init__(self):
r"""
:param Url: 服务url地址
:type Url: str
"""
self.Url = None
def _deserialize(self, params):
self.Url = params.get("Url")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StringData(AbstractModel):
"""数字类型数据
"""
def __init__(self):
r"""
:param Name: 名称
:type Name: str
:param Desc: 描述
:type Desc: str
:param Mode: 读写模式
:type Mode: str
:param Range: 长度范围
    :type Range: list of non-negative int
"""
self.Name = None
self.Desc = None
self.Mode = None
self.Range = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Desc = params.get("Desc")
self.Mode = params.get("Mode")
self.Range = params.get("Range")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class Topic(AbstractModel):
"""Topic
"""
def __init__(self):
r"""
:param TopicId: TopicId
:type TopicId: str
:param TopicName: Topic名称
:type TopicName: str
:param ProductId: 产品Id
:type ProductId: str
:param MsgLife: 消息最大生命周期
:type MsgLife: int
:param MsgSize: 消息最大大小
:type MsgSize: int
:param MsgCount: 消息最大数量
:type MsgCount: int
:param Deleted: 已删除
:type Deleted: int
:param Path: Topic完整路径
:type Path: str
:param CreateTime: 创建时间
:type CreateTime: str
:param UpdateTime: 更新时间
:type UpdateTime: str
"""
self.TopicId = None
self.TopicName = None
self.ProductId = None
self.MsgLife = None
self.MsgSize = None
self.MsgCount = None
self.Deleted = None
self.Path = None
self.CreateTime = None
self.UpdateTime = None
def _deserialize(self, params):
self.TopicId = params.get("TopicId")
self.TopicName = params.get("TopicName")
self.ProductId = params.get("ProductId")
self.MsgLife = params.get("MsgLife")
self.MsgSize = params.get("MsgSize")
self.MsgCount = params.get("MsgCount")
self.Deleted = params.get("Deleted")
self.Path = params.get("Path")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TopicAction(AbstractModel):
"""转发到topic动作
"""
def __init__(self):
r"""
:param Topic: 目标topic
:type Topic: str
"""
self.Topic = None
def _deserialize(self, params):
self.Topic = params.get("Topic")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UnassociateSubDeviceFromGatewayProductRequest(AbstractModel):
"""UnassociateSubDeviceFromGatewayProduct请求参数结构体
"""
def __init__(self):
r"""
:param SubDeviceProductId: 子设备产品Id
:type SubDeviceProductId: str
:param GatewayProductId: 网关设备产品Id
:type GatewayProductId: str
"""
self.SubDeviceProductId = None
self.GatewayProductId = None
def _deserialize(self, params):
self.SubDeviceProductId = params.get("SubDeviceProductId")
self.GatewayProductId = params.get("GatewayProductId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UnassociateSubDeviceFromGatewayProductResponse(AbstractModel):
"""UnassociateSubDeviceFromGatewayProduct返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class UpdateProductRequest(AbstractModel):
"""UpdateProduct请求参数结构体
"""
def __init__(self):
r"""
:param ProductId: 产品Id
:type ProductId: str
:param Name: 产品名称
:type Name: str
:param Description: 产品描述
:type Description: str
:param DataTemplate: 数据模版
:type DataTemplate: list of DataTemplate
"""
self.ProductId = None
self.Name = None
self.Description = None
self.DataTemplate = None
def _deserialize(self, params):
self.ProductId = params.get("ProductId")
self.Name = params.get("Name")
self.Description = params.get("Description")
if params.get("DataTemplate") is not None:
self.DataTemplate = []
for item in params.get("DataTemplate"):
obj = DataTemplate()
obj._deserialize(item)
self.DataTemplate.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UpdateProductResponse(AbstractModel):
"""UpdateProduct返回参数结构体
"""
def __init__(self):
r"""
:param Product: 更新后的产品信息
:type Product: :class:`tencentcloud.iot.v20180123.models.Product`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Product = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Product") is not None:
self.Product = Product()
self.Product._deserialize(params.get("Product"))
self.RequestId = params.get("RequestId")
class UpdateRuleRequest(AbstractModel):
"""UpdateRule请求参数结构体
"""
def __init__(self):
r"""
:param RuleId: 规则Id
:type RuleId: str
:param Name: 名称
:type Name: str
:param Description: 描述
:type Description: str
:param Query: 查询
:type Query: :class:`tencentcloud.iot.v20180123.models.RuleQuery`
:param Actions: 转发动作列表
:type Actions: list of Action
:param DataType: 数据类型(0:文本,1:二进制)
:type DataType: int
"""
self.RuleId = None
self.Name = None
self.Description = None
self.Query = None
self.Actions = None
self.DataType = None
def _deserialize(self, params):
self.RuleId = params.get("RuleId")
self.Name = params.get("Name")
self.Description = params.get("Description")
if params.get("Query") is not None:
self.Query = RuleQuery()
self.Query._deserialize(params.get("Query"))
if params.get("Actions") is not None:
self.Actions = []
for item in params.get("Actions"):
obj = Action()
obj._deserialize(item)
self.Actions.append(obj)
self.DataType = params.get("DataType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UpdateRuleResponse(AbstractModel):
"""UpdateRule返回参数结构体
"""
def __init__(self):
r"""
:param Rule: 规则
:type Rule: :class:`tencentcloud.iot.v20180123.models.Rule`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Rule = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Rule") is not None:
self.Rule = Rule()
self.Rule._deserialize(params.get("Rule"))
        self.RequestId = params.get("RequestId")
| tzpBingo/github-trending | codespace/python/tencentcloud/iot/v20180123/models.py | Python | mit | 107,334 |
# coding: utf-8
# PYTHON IMPORTS
import os
from tempfile import NamedTemporaryFile
# DJANGO IMPORTS
from django.utils.translation import ugettext as _
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.core.files import File
# FILEBROWSER IMPORTS
from filebrowser.settings import *
# PIL import
if STRICT_PIL:
from PIL import Image
else:
try:
from PIL import Image
except ImportError:
import Image
def applies_to_all_images(fileobject):
return fileobject.filetype == 'Image'
def transpose_image(request, fileobjects, operation):
    for fileobject in fileobjects:
        root, ext = os.path.splitext(fileobject.filename)
        ext = ext.lower()  # Image.EXTENSION keys are lowercase ('.jpg', '.png', ...)
        f = fileobject.site.storage.open(fileobject.path)
        im = Image.open(f)
        new_image = im.transpose(operation)
        tmpfile = File(NamedTemporaryFile())
        try:
            # GIFs cannot be saved with the optimize flag, so skip it for them.
            new_image.save(tmpfile, format=Image.EXTENSION[ext], quality=VERSION_QUALITY, optimize=(os.path.splitext(fileobject.path)[1].lower() != '.gif'))
        except IOError:
            # Some formats reject the optimize flag entirely; retry without it.
            new_image.save(tmpfile, format=Image.EXTENSION[ext], quality=VERSION_QUALITY)
        try:
            saved_under = fileobject.site.storage.save(fileobject.path, tmpfile)
            if saved_under != fileobject.path:
                # Storage backends may rename on save; move back to keep the path stable.
                fileobject.site.storage.move(saved_under, fileobject.path)
            # Cached thumbnails/versions are stale after the transpose.
            fileobject.delete_versions()
        finally:
            tmpfile.close()
            f.close()
        messages.add_message(request, messages.SUCCESS, _("Action applied successfully to '%s'") % fileobject.filename)
def flip_horizontal(request, fileobjects):
    transpose_image(request, fileobjects, Image.FLIP_LEFT_RIGHT)
flip_horizontal.short_description = _(u'Flip horizontal')
flip_horizontal.applies_to = applies_to_all_images
def flip_vertical(request, fileobjects):
    transpose_image(request, fileobjects, Image.FLIP_TOP_BOTTOM)
flip_vertical.short_description = _(u'Flip vertical')
flip_vertical.applies_to = applies_to_all_images
def rotate_90_clockwise(request, fileobjects):
    # PIL's ROTATE_* constants rotate counterclockwise, so 90° CW is ROTATE_270.
    transpose_image(request, fileobjects, Image.ROTATE_270)
rotate_90_clockwise.short_description = _(u'Rotate 90° CW')
rotate_90_clockwise.applies_to = applies_to_all_images
def rotate_90_counterclockwise(request, fileobjects):
    transpose_image(request, fileobjects, Image.ROTATE_90)
rotate_90_counterclockwise.short_description = _(u'Rotate 90° CCW')
rotate_90_counterclockwise.applies_to = applies_to_all_images
def rotate_180(request, fileobjects):
    transpose_image(request, fileobjects, Image.ROTATE_180)
rotate_180.short_description = _(u'Rotate 180°')
rotate_180.applies_to = applies_to_all_images
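# Hedged registration sketch (assumption: the stock django-filebrowser custom
# action API; left commented out because registration belongs in the project's
# site setup, not in this module):
#
# from filebrowser.sites import site
# site.add_action(flip_horizontal)
# site.add_action(flip_vertical)
# site.add_action(rotate_90_clockwise)
# site.add_action(rotate_90_counterclockwise)
# site.add_action(rotate_180)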
| yaroslavprogrammer/django-filebrowser-no-grappelli | filebrowser/actions.py | Python | bsd-3-clause | 2,643 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'ProjectImage.desc'
db.delete_column('portfolio_projectimage', 'desc')
def backwards(self, orm):
# Adding field 'ProjectImage.desc'
db.add_column('portfolio_projectimage', 'desc', self.gf('django.db.models.fields.TextField')(default='', blank=True), keep_default=False)
models = {
'portfolio.category': {
'Meta': {'ordering': "['position']", 'object_name': 'Category'},
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'position': ('django.db.models.fields.PositiveIntegerField', [], {}),
'short_description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'portfolio.project': {
'Meta': {'ordering': "['-start_date', '-end_date']", 'object_name': 'Project'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portfolio.Category']"}),
'client': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'markup': ('django.db.models.fields.CharField', [], {'default': "'m'", 'max_length': '1'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'rendered_description': ('django.db.models.fields.TextField', [], {}),
'short_description': ('django.db.models.fields.TextField', [], {}),
'site_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'tagline': ('django.db.models.fields.CharField', [], {'max_length': '400'})
},
'portfolio.projectimage': {
'Meta': {'object_name': 'ProjectImage'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portfolio.Project']"})
}
}
complete_apps = ['portfolio']
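# Usage note (hedged: standard South workflow, nothing project-specific):
#
#   ./manage.py migrate portfolio 0008   # apply -- drops ProjectImage.desc
#   ./manage.py migrate portfolio 0007   # reverse -- re-adds the column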
| olofsj/tokotoko-portfolio | portfolio/migrations/0008_auto__del_field_projectimage_desc.py | Python | bsd-3-clause | 3,139 |
######################################################################
#
# Copyright (C) 2013
# Associated Universities, Inc. Washington DC, USA,
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
# License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
#
# Correspondence concerning VLA Pipelines should be addressed as follows:
# Please register and submit helpdesk tickets via: https://help.nrao.edu
# Postal address:
# National Radio Astronomy Observatory
# VLA Pipeline Support Office
# PO Box O
# Socorro, NM, USA
#
######################################################################
# MAKE GAIN TABLE FOR FLUX DENSITY BOOTSTRAPPING
# Make a gain table that includes gain and opacity corrections for final
# amp cal, for flux density bootstrapping
logprint ("Starting EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'start')
QA2_fluxgains='Pass'
#logprint ("Making fresh calibrators.ms", logfileout='logs/fluxgains.log')
#
#syscommand='rm -rf calibrators.ms'
#os.system(syscommand)
#
#default('split')
#vis=ms_active
#outputvis='calibrators.ms'
#datacolumn='corrected'
#field=''
#spw=''
#width=int(max(channels))
#antenna=''
#timebin='0s'
#timerange=''
#scan=calibrator_scan_select_string
#intent=''
#array=''
#uvrange=''
#correlation=''
#observation=''
#keepflags=False
#split()
logprint ("Setting models for standard primary calibrators", logfileout='logs/fluxgains.log')
tb.open('calibrators.ms')
positions = []
for ii in range(0,len(field_positions[0][0])):
positions.append([field_positions[0][0][ii], field_positions[1][0][ii]])
standard_source_names = [ '3C48', '3C138', '3C147', '3C286' ]
standard_source_fields = find_standards(positions)
ii=0
for fields in standard_source_fields:
for myfield in fields:
spws = field_spws[myfield]
for myspw in spws:
reference_frequency = center_frequencies[myspw]
EVLA_band = find_EVLA_band(reference_frequency)
logprint ("Center freq for spw "+str(myspw)+" = "+str(reference_frequency)+", observing band = "+EVLA_band, logfileout='logs/fluxgains.log')
model_image = standard_source_names[ii]+'_'+EVLA_band+'.im'
logprint ("Setting model for field "+str(myfield)+" spw "+str(myspw)+" using "+model_image, logfileout='logs/fluxgains.log')
try:
default('setjy')
vis='calibrators.ms'
field=str(myfield)
spw=str(myspw)
selectdata=False
scalebychan=True
standard='Perley-Butler 2013'
model=model_image
listmodels=False
usescratch=scratch
setjy()
except:
logprint('no data found for field ' + str(myfield)+" spw "+str(myspw), logfileout='logs/fluxgains.log')
ii=ii+1
tb.close()
logprint ("Making gain tables for flux density bootstrapping", logfileout='logs/fluxgains.log')
logprint ("Short solint = "+new_gain_solint1, logfileout='logs/fluxgains.log')
logprint ("Long solint = "+gain_solint2, logfileout='logs/fluxgains.log')
print ""
print "Finding a reference antenna"
print ""
refantspw=''
refantfield=calibrator_field_select_string
findrefant=RefAntHeuristics(vis='calibrators.ms',field=refantfield,geometry=True,flagging=True)
RefAntOutput=findrefant.calculate()
refAnt=str(RefAntOutput[0])+','+str(RefAntOutput[1])+','+str(RefAntOutput[2])+','+str(RefAntOutput[3])
logprint ("The pipeline will use antenna(s) "+refAnt+" as the reference", logfileout='logs/fluxgains.log')
# Derive amp gain table. Note that gaincurves and opacity
# corrections have already been applied during applycal and split in
# semiFinalBPdcals/solint.py.
# Need to add check for 3C84 in here, when heuristics have been sorted out
default('gaincal')
vis='calibrators.ms'
caltable='fluxphaseshortgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=new_gain_solint1
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=3.0
solnorm=False
gaintype='G'
smodel=[]
calmode='p'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b'])
gaintable=['']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
default('gaincal')
vis='calibrators.ms'
caltable='fluxgaincal.g'
field=''
spw=''
intent=''
selectdata=False
solint=gain_solint2
combine='scan'
preavg=-1.0
refant=refAnt
minblperant=minBL_for_cal
minsnr=5.0
solnorm=False
gaintype='G'
smodel=[]
calmode='ap'
append=False
docallib=False
#gaintable=filter(None, [priorcals,'delay.k','BPcal.b','fluxphaseshortgaincal.g'])
gaintable=['fluxphaseshortgaincal.g']
gainfield=['']
interp=['']
spwmap=[]
parang=False
gaincal()
logprint ("Gain table fluxgaincal.g is ready for flagging", logfileout='logs/fluxgains.log')
# Calculate fractions of flagged solutions for final QA2; note, can
# tolerate higher fraction of flagged solutions for this step than in
# other gain tables
flaggedGainSolns=getCalFlaggedSoln('fluxgaincal.g')
if (flaggedGainSolns['all']['total'] == 0):
QA2_fluxgains='Fail'
elif (flaggedGainSolns['antmedian']['fraction'] > 0.2):
QA2_fluxgains='Partial'
logprint ("QA2 score: "+QA2_fluxgains, logfileout='logs/fluxgains.log')
logprint ("Finished EVLA_pipe_fluxgains.py", logfileout='logs/fluxgains.log')
time_list=runtiming('fluxgains', 'end')
pipeline_save()
| e-koch/VLA_Lband | 16B/pipeline4.7.1_custom/EVLA_pipe_fluxgains.py | Python | mit | 6,065 |
from django.db import models
# Create your models here.
class MissingLog(models.Model):
font_name = models.CharField(max_length=256)
sys_font = models.BooleanField()
def __unicode__(self):
return '[%s] %s' % (self.sys_font and 'sys' or 'cad', self.font_name)
class Meta:
ordering = ['sys_font']
db_table = 'FontCenter_missing_log'
app_label = 'Warrentech_FontCenter_Web'
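# Hedged usage sketch (illustrative only, not part of the app): recording a
# missing system font and counting entries; ordering follows Meta.ordering.
def _example_log_missing_font():
    MissingLog.objects.create(font_name='SimSun', sys_font=True)
    return MissingLog.objects.filter(sys_font=True).count()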
| Xiongpq/FontCenter | trunk/src/Web/Warrentech_FontCenter_Web/Warrentech_FontCenter_Web/models/MissingLog.py | Python | mit | 448 |
# coding=utf-8
import os
import sys
import pytest
import virtual_environments
from diff_test_tools import SCRIPT, expected_messages
from service_messages import ServiceMessage, assert_service_messages, match
from test_util import run_command, get_teamcity_messages_root
@pytest.fixture(scope='module', params=["nose==1.3.7"]) # Nose is dead, support only latest version
def venv(request):
"""
Prepares a virtual environment for nose.
:rtype : virtual_environments.VirtualEnvDescription
"""
return virtual_environments.prepare_virtualenv([request.param])
def _test_count(venv, count):
nose_version = None
for package in venv.packages:
if package.startswith("nose=="):
nose_version = tuple([int(x) for x in package[6:].split(".")])
if nose_version is None or nose_version >= (1, 3, 4):
return ServiceMessage('testCount', {'count': str(count)})
return None
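# Illustration (hedged: representative stdout lines, not captured from a real
# run): assert_service_messages() matches TeamCity service messages that the
# teamcity-messages plugin prints during the nose run, e.g.
#
#   ##teamcity[testCount count='1']
#   ##teamcity[testStarted name='testa.test_func' flowId='testa.test_func']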
def test_hierarchy(venv):
output = run(venv, 'hierarchy')
test_name = 'namespace1.namespace2.testmyzz.test'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'captureStandardOutput': 'false', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_doctests(venv):
output = run(venv, 'doctests', options="--with-doctest")
test_name = 'doctests.namespace1.d.multiply'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_docstrings(venv):
output = run(venv, 'docstrings')
test_name = 'testa.test_func (My cool test_name)'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_skip(venv):
output = run(venv, 'skiptest')
test_name = 'testa.test_func'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testIgnored', {'name': test_name, 'message': u'SKIPPED: my skip причина', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_coverage(venv):
venv_with_coverage = virtual_environments.prepare_virtualenv(venv.packages + ["coverage==4.5.4"])
coverage_file = os.path.join(virtual_environments.get_vroot(), "coverage-temp.xml")
output = run(venv_with_coverage, 'coverage', options="--with-coverage --cover-erase --cover-tests --cover-xml --cover-xml-file=\"" + coverage_file + "\"")
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': 'testa.test_mycode'}),
ServiceMessage('testFinished', {'name': 'testa.test_mycode'}),
])
f = open(coverage_file, "rb")
content = str(f.read())
f.close()
assert content.find('<line hits="1" number="2"/>') > 0
def test_flask_test_incomplete(venv):
venv_with_flask = virtual_environments.prepare_virtualenv(venv.packages + ["Flask-Testing==0.8.1"])
output = run(venv_with_flask, 'flask_testing_incomplete')
test_name = 'test_foo.TestIncompleteFoo.test_add'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("nNotImplementedError") > 0
def test_flask_test_ok(venv):
venv_with_flask = virtual_environments.prepare_virtualenv(venv.packages + ["Flask-Testing==0.8.1"])
output = run(venv_with_flask, 'flask_testing_ok')
test_name = 'test_foo.TestFoo.test_add'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_deprecated(venv):
output = run(venv, 'deprecatedtest')
test_name = 'testa.test_func'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testIgnored', {'name': test_name, 'message': 'Deprecated', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
@pytest.mark.skipif("sys.version_info < (2, 7) ", reason="requires Python 2.7")
def test_diff(venv):
output = run(venv, SCRIPT)
assert_service_messages(
output,
[
_test_count(venv, 3),
] + expected_messages('diff_assert.FooTest'))
@pytest.mark.skipif("sys.version_info < (2, 7) ", reason="requires Python 2.7")
def test_long_diff(venv):
output = run(venv, "../diff_assert_long.py")
test_name = 'diff_assert_long.FooTest.test_test'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
def test_generators(venv):
output = run(venv, 'generators')
assert_service_messages(
output,
[
_test_count(venv, 3),
ServiceMessage('testStarted', {'name': 'testa.test_evens(0, 0, |\'_|\')'}),
ServiceMessage('testFinished', {'name': 'testa.test_evens(0, 0, |\'_|\')'}),
ServiceMessage('testStarted', {'name': "testa.test_evens(1, 3, |'_|')"}),
ServiceMessage('testFinished', {'name': "testa.test_evens(1, 3, |'_|')"}),
ServiceMessage('testStarted', {'name': "testa.test_evens(2, 6, |'_|')"}),
ServiceMessage('testFinished', {'name': "testa.test_evens(2, 6, |'_|')"}),
])
def test_generators_class(venv):
output = run(venv, 'generators_class')
assert_service_messages(
output,
[
_test_count(venv, 3),
ServiceMessage('testStarted', {'name': 'testa.TestA.test_evens(0, 0, |\'_|\')'}),
ServiceMessage('testFinished', {'name': 'testa.TestA.test_evens(0, 0, |\'_|\')'}),
ServiceMessage('testStarted', {'name': "testa.TestA.test_evens(1, 3, |'_|')"}),
ServiceMessage('testFinished', {'name': "testa.TestA.test_evens(1, 3, |'_|')"}),
ServiceMessage('testStarted', {'name': "testa.TestA.test_evens(2, 6, |'_|')"}),
ServiceMessage('testFinished', {'name': "testa.TestA.test_evens(2, 6, |'_|')"}),
])
def test_pass_output(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass')
test_name = 'nose-guinea-pig.GuineaPig.test_pass'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'captureStandardOutput': 'false'}),
ServiceMessage('testStdOut', {'out': 'Output from test_pass|n', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
def test_pass_no_capture(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_pass', options="--nocapture")
assert output.find("Output from test_pass") > 0
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': 'nose-guinea-pig.GuineaPig.test_pass', 'captureStandardOutput': 'true'}),
ServiceMessage('testFinished', {'name': 'nose-guinea-pig.GuineaPig.test_pass'}),
])
def test_fail(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail')
test_name = 'nose-guinea-pig.GuineaPig.test_fail'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("2 * 2 == 5") > 0
def test_setup_module_error(venv):
output = run(venv, 'setup_module_error')
test_name = 'namespace2.testa.setup'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_setup_class_error(venv):
output = run(venv, 'setup_class_error')
test_name = 'testa.TestXXX.setup'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in setup context'}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("RRR") > 0
def test_setup_package_error(venv):
output = run(venv, 'setup_package_error')
test_name = 'namespace2.setup'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in setup context'}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_setup_function_error(venv):
output = run(venv, 'setup_function_error')
test_name = 'testa.test'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_teardown_module_error(venv):
output = run(venv, 'teardown_module_error')
test_name = 'namespace2.testa.teardown'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': 'namespace2.testa.test_mycode'}),
ServiceMessage('testFinished', {'name': 'namespace2.testa.test_mycode'}),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in teardown context'}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_teardown_class_error(venv):
output = run(venv, 'teardown_class_error')
test_name = 'testa.TestXXX.teardown'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': 'testa.TestXXX.runTest'}),
ServiceMessage('testFinished', {'name': 'testa.TestXXX.runTest'}),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in teardown context'}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("RRR") > 0
def test_teardown_package_error(venv):
output = run(venv, 'teardown_package_error')
test_name = 'namespace2.teardown'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': 'namespace2.testa.test_mycode'}),
ServiceMessage('testFinished', {'name': 'namespace2.testa.test_mycode'}),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name, 'message': 'error in teardown context'}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_teardown_function_error(venv):
output = run(venv, 'teardown_function_error')
test_name = 'testa.test'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Traceback") == 0
assert failed_ms.params['details'].find("AssertionError") > 0
def test_buffer_output(venv):
output = run(venv, 'buffer_output')
test_name = 'test_buffer_output.SpamTest.test_test'
assert_service_messages(
output,
[_test_count(venv, 1)] +
[
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testStdOut', {'out': "stdout_line1|n", 'flowId': test_name}),
ServiceMessage('testStdOut', {'out': "stdout_line2|n", 'flowId': test_name}),
ServiceMessage('testStdOut', {'out': "stdout_line3_nonewline", 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
# Check no stdout_test or stderr_test in the output (not in service messages)
# it checks self._mirrorOutput = False
output = output.replace("out='stdout_test", "").replace("out='stderr_test", "")
assert output.find("stdout_test") < 0
assert output.find("stderr_test") < 0
# assert logcapture plugin works
assert output.find("begin captured logging") > 0
assert output.find("log info message") >= 0
def test_fail_with_msg(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_with_msg')
test_name = 'nose-guinea-pig.GuineaPig.test_fail_with_msg'
ms = assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name}),
])
failed_ms = match(ms, ServiceMessage('testFailed', {'name': test_name}))
assert failed_ms.params['details'].find("Bitte keine Werbung") > 0
def test_fail_output(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_output')
test_name = 'nose-guinea-pig.GuineaPig.test_fail_output'
assert_service_messages(
output,
[
_test_count(venv, 1),
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 1|n', 'flowId': test_name}),
ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 2|n', 'flowId': test_name}),
ServiceMessage('testStdOut', {'name': test_name, 'out': 'Output line 3|n', 'flowId': test_name}),
ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
])
def test_fail_big_output(venv):
output = run(venv, 'nose-guinea-pig.py', 'GuineaPig', 'test_fail_big_output', print_output=False)
test_name = 'nose-guinea-pig.GuineaPig.test_fail_big_output'
full_line = 'x' * 50000
leftovers = 'x' * (1024 * 1024 - 50000 * 20)
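    # Expect the 1 MiB of captured output to arrive as 20 testStdOut chunks of
    # 50000 characters followed by a single chunk holding the remainder.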
assert_service_messages(
output,
[_test_count(venv, 1)] +
[ServiceMessage('testStarted', {})] +
[ServiceMessage('testStdOut', {'out': full_line, 'flowId': test_name})] * 20 +
[ServiceMessage('testStdOut', {'out': leftovers, 'flowId': test_name})] +
[ServiceMessage('testFailed', {'name': test_name, 'flowId': test_name})] +
[ServiceMessage('testFinished', {})]
)
@pytest.mark.skipif("sys.version_info < (2, 7)", reason="requires Python 2.7+")
def test_issue_98(venv):
# Start the process and wait for its output
custom_test_loader = os.path.join(get_teamcity_messages_root(), 'tests', 'guinea-pigs', 'nose', 'issue_98', 'custom_test_loader.py')
command = os.path.join(venv.bin, 'python') + " " + custom_test_loader
output = run_command(command)
test_name = 'simple_tests.SimpleTests.test_two'
assert_service_messages(
output,
[
ServiceMessage('testStarted', {'name': test_name, 'flowId': test_name}),
ServiceMessage('testIgnored', {'name': test_name, 'message': 'Skipped: Skipping', 'flowId': test_name}),
ServiceMessage('testFinished', {'name': test_name, 'flowId': test_name}),
],
actual_messages_predicate=lambda ms: ms.name != "testCount"
)
def test_nose_parameterized(venv):
venv_with_params = virtual_environments.prepare_virtualenv(venv.packages + ["nose-parameterized"])
output = run(venv_with_params, 'nose_parameterized')
test1_name = "test.test(|'1_1|', |'https://facebook_com/share_php?http://foo_com/|')"
test2_name = 'test.test(None, 3)'
assert_service_messages(
output,
[
_test_count(venv, 2),
ServiceMessage('testStarted', {'name': test1_name, 'flowId': test1_name}),
ServiceMessage('testFinished', {'name': test1_name, 'flowId': test1_name}),
ServiceMessage('testStarted', {'name': test2_name, 'flowId': test2_name}),
ServiceMessage('testFinished', {'name': test2_name, 'flowId': test2_name}),
])
def run(venv, file, clazz=None, test=None, print_output=True, options=""):
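    # Builds and runs: nosetests -v [options] tests/guinea-pigs/nose/<file>[:<Class>][.<test>]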
if sys.version_info > (3, 8):
pytest.skip("nose is outdated and doesn't support 3.8")
if clazz:
clazz_arg = ":" + clazz
else:
clazz_arg = ""
if test:
test_arg = "." + test
else:
test_arg = ""
command = os.path.join(venv.bin, 'nosetests') + \
" -v " + options + " " + \
os.path.join('tests', 'guinea-pigs', 'nose', file) + clazz_arg + test_arg
return run_command(command, print_output=print_output)
| throwable-one/teamcity-messages | tests/integration-tests/nose_integration_test.py | Python | apache-2.0 | 20,505 |
"""Copyright 2009:
Isaac Carroll, Kevin Clement, Jon Handy, David Carroll, Daniel Carroll
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import pygame
from pygame.locals import *
import string
import sys, os, os.path
import logging
from xml.dom import minidom, Node
#****************************************************************************
# The tileset class reads tileset information from a XML file, and
# loads the corresponding graphics files into pygame surfaces.
#****************************************************************************
class Tileset:
def __init__(self, clientstate):
self.client = clientstate
self.imagestore = {} # Stores Pygame surfaces
self.animstore = {} # Stores the number of animation frames
self.animation_frame = 0
#****************************************************************************
#load tileset information
#****************************************************************************
def load_tileset(self):
self.imagestore = None
self.imagestore = {}
self.edges = {}
tilesetfile = self.client.settings.tileset
doc = minidom.parse(tilesetfile)
rootNode = doc.documentElement
tilesetPath = rootNode.getAttribute('path')
self.tile_width = int(rootNode.getAttribute('tile_width'))
self.tile_height = int(rootNode.getAttribute('tile_height'))
self.edge_tile_height = int(rootNode.getAttribute('edge_tile_height'))
self.panel_width = int(rootNode.getAttribute('panel_width'))
self.panel_height = int(rootNode.getAttribute('panel_height'))
for fileNode in rootNode.getElementsByTagName('file'):
image_file_name = os.path.join(tilesetPath, fileNode.getAttribute('src'))
try:
                # Load image file with pygame.
image_full = pygame.image.load(image_file_name).convert_alpha()
except IOError:
logging.error("Loading of graphic file failed: %s" % (image_file_name))
pygame.quit()
# Load any sprite at a specified position
for tileNode in fileNode.getElementsByTagName('sprite'):
name = tileNode.getAttribute('name')
x = int(tileNode.getAttribute('x'))
y = int(tileNode.getAttribute('y'))
width = int(tileNode.getAttribute('width'))
height = int(tileNode.getAttribute('height'))
per_pixel_alpha = ("true" == str(tileNode.getAttribute('pixelalpha')))
self.tileset_add_image(image_full, name, x, y, width, height, per_pixel_alpha)
# Load a terrain tile
for tileNode in fileNode.getElementsByTagName('terrain'):
name = tileNode.getAttribute('name')
x = int(tileNode.getAttribute('x'))
y = int(tileNode.getAttribute('y'))
width = int(tileNode.getAttribute('width'))
height = int(tileNode.getAttribute('height'))
per_pixel_alpha = ("true" == str(tileNode.getAttribute('pixelalpha')))
for frameNode in tileNode.getElementsByTagName('frame'):
slotx = int(frameNode.getAttribute('slot-x'))
sloty = int(frameNode.getAttribute('slot-y'))
sub_x = x + slotx * width + slotx
sub_y = y + sloty * height + sloty
self.tileset_add_image(image_full, name, sub_x, sub_y, width, height, per_pixel_alpha)
# Load a edge tile
for tileNode in fileNode.getElementsByTagName('edgeterrain'):
primary = tileNode.getAttribute('primary')
secondary = tileNode.getAttribute('secondary')
self.edges.update({primary:secondary})
x = int(tileNode.getAttribute('x'))
y = int(tileNode.getAttribute('y'))
width = int(tileNode.getAttribute('width'))
height = int(tileNode.getAttribute('height'))
per_pixel_alpha = ("true" == str(tileNode.getAttribute('pixelalpha')))
for frameNode in tileNode.getElementsByTagName('frame'):
slotx = int(frameNode.getAttribute('slot-x'))
sloty = int(frameNode.getAttribute('slot-y'))
edge_key = frameNode.getAttribute('key')
key = primary + "-" + secondary + "-" + edge_key
sub_x = x + slotx * width + slotx
sub_y = y + sloty * height + sloty
self.tileset_add_image(image_full, key, sub_x, sub_y, width, height, per_pixel_alpha)
for tileNode in fileNode.getElementsByTagName('weapon'):
placeholder = True
# Load units graphic
for tileNode in fileNode.getElementsByTagName('unit'):
name = tileNode.getAttribute('name')
x = int(tileNode.getAttribute('x'))
y = int(tileNode.getAttribute('y'))
frames = int(tileNode.getAttribute('frames'))
width = int(tileNode.getAttribute('width'))
height = int(tileNode.getAttribute('height'))
per_pixel_alpha = ("true" == str(tileNode.getAttribute('pixelalpha')))
self.animstore.update({name: frames})
for teamID in range(1, 4):
typeset = self.client.game.get_unit_typeset(name)
if typeset == "build" or typeset == "tether" or typeset == "weap" or typeset == "balloon":
color = self.client.game.get_unit_color(int(teamID))
else:
color = None
for frameNode in tileNode.getElementsByTagName('frame'):
slotx = int(frameNode.getAttribute('slot-x'))
sloty = int(frameNode.getAttribute('slot-y'))
dir = frameNode.getAttribute('dir')
frame = frameNode.getAttribute('anim_frame')
key = name + dir + frame + str(teamID)
sub_x = x + slotx * width + slotx
sub_y = y + sloty * height + sloty
self.tileset_add_image(image_full, key, sub_x, sub_y, width, height, per_pixel_alpha, color)
#****************************************************************************
#
#****************************************************************************
def tileset_add_image(self, image, key, x, y, width, height, alpha, color=None):
tempimage = image.copy()
if color:
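            # Heuristic team recoloring: pixels whose red and blue channels are
            # close while green is markedly darker are treated as paintable and
            # replaced with the team color, scaled by the pixel's red-channel brightness.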
for pixelX in range(width):
for pixelY in range(height):
pixel = tempimage.get_at((pixelX, pixelY))
(red, green, blue, a) = pixel
(new_r, new_g, new_b) = color
if ((red < blue + 50 and red > blue - 50) and (green < red - 50 or green < blue - 50)) or (red < blue + 30 and red > blue - 30) and (green < red - 100 or green < blue - 100):
new_color = (red * new_r / 255, new_g * red / 255, new_b * red / 255, a)
tempimage.set_at((pixelX, pixelY), new_color)
self.imagestore.update({key: tempimage})
#****************************************************************************
#
#****************************************************************************
def get_terrain_surf_from_tile(self, tile):
tile_key = str(tile.type.id)
try:
return self.imagestore[tile_key]
except KeyError:
return None
#****************************************************************************
#
#****************************************************************************
def is_edge_tile(self, tile):
is_edge = 0
for adj_tile in self.client.map.get_adjacent_tiles((tile.x, tile.y)):
if tile.type.id != adj_tile.type.id:
is_edge = 1
is_edge = 0 #this is to disable edge tiles which are incompatible with current map
return (is_edge and str(tile.type.id) in self.edges)
#****************************************************************************
#
#****************************************************************************
def get_edge_surf_from_tile(self, tile):
real_map_x = tile.x
real_map_y = tile.y
for adj_tile in self.client.map.get_adjacent_tiles((tile.x, tile.y)):
if tile.type.id != adj_tile.type.id:
secondary_type = str(adj_tile.type.id)
nw = self.client.map.get_north_west_tile((real_map_x, real_map_y))
ne = self.client.map.get_north_east_tile((real_map_x, real_map_y))
sw = self.client.map.get_south_west_tile((real_map_x, real_map_y))
se = self.client.map.get_south_east_tile((real_map_x, real_map_y))
n = self.client.map.get_north_tile((real_map_x, real_map_y))
w = self.client.map.get_west_tile((real_map_x, real_map_y))
e = self.client.map.get_east_tile((real_map_x, real_map_y))
s = self.client.map.get_south_tile((real_map_x, real_map_y))
if (not nw or not ne or not sw or not se): return
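        # Build one lookup key per tile quadrant from the coast types of the
        # three neighbours bordering that corner ('u' marks the unused direction).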
coast_str_1 = ("%s%su%s" % (self.client.map.get_coast_type(nw), self.client.map.get_coast_type(n), self.client.map.get_coast_type(w)))
coast_str_2 = ("%s%s%su" % (self.client.map.get_coast_type(n), self.client.map.get_coast_type(ne), self.client.map.get_coast_type(e)))
coast_str_3 = ("u%s%s%s" % (self.client.map.get_coast_type(e), self.client.map.get_coast_type(se), self.client.map.get_coast_type(s)))
coast_str_4 = ("%su%s%s" % (self.client.map.get_coast_type(w), self.client.map.get_coast_type(s), self.client.map.get_coast_type(sw)))
tile_key1 = ("%s-%s-%s" % (tile.type.id, secondary_type, coast_str_1))
tile_key2 = ("%s-%s-%s" % (tile.type.id, secondary_type, coast_str_2))
tile_key3 = ("%s-%s-%s" % (tile.type.id, secondary_type, coast_str_3))
tile_key4 = ("%s-%s-%s" % (tile.type.id, secondary_type, coast_str_4))
surface1 = self.imagestore[tile_key1]
surface2 = self.imagestore[tile_key2]
surface3 = self.imagestore[tile_key3]
surface4 = self.imagestore[tile_key4]
return (surface1, surface2, surface3, surface4)
#****************************************************************************
#
#****************************************************************************
def get_tile_surf(self, key, height=None):
try:
return self.imagestore[key]
except KeyError:
return None
#****************************************************************************
#
#****************************************************************************
def get_unit_surf_from_tile(self, unit_sprite, dir, playerID):
frames_max = self.animstore[unit_sprite]
frame = int(self.animation_frame) % frames_max
tile_key = "%s%s%r%s" % (unit_sprite, dir, frame, playerID)
try:
return self.imagestore[tile_key]
except KeyError:
return None
#****************************************************************************
#
#****************************************************************************
def get_mouse_cursor(self, type):
frames_max = self.animstore[type]
frame = int(self.animation_frame) % frames_max
tile_key = "%s%r" % (type, frame)
try:
return self.imagestore[tile_key]
except KeyError:
return None
#****************************************************************************
#
#****************************************************************************
def animation_next(self):
self.animation_frame += 0.5
#****************************************************************************
# This method returns a pygame surface from a filename.
#****************************************************************************
def load(image_file_name):
try:
        # Load image file with pygame.
image = pygame.image.load(image_file_name).convert_alpha()
# Convert PIL image to Pygame surface
#return pygame.image.fromstring(image.tostring(), image.size, image.mode)
return(image)
except IOError:
logging.error("Loading of graphic file failed: %s" % (image_file_name))
pygame.quit()
| Donkyhotay/MoonPy | client/tileset.py | Python | gpl-3.0 | 13,461 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# acertmgr - various support functions
# Copyright (c) Markus Hauschild & David Klaftenegger, 2016.
# Copyright (c) Rudolf Mayerhofer, 2019.
# available under the ISC license, see LICENSE
import base64
import datetime
import io
import os
import re
import stat
import sys
import traceback
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa, ec, padding
from cryptography.hazmat.primitives.asymmetric.utils import decode_dss_signature
from cryptography.utils import int_to_bytes
from cryptography.x509.oid import NameOID, ExtensionOID
try:
from cryptography.x509 import ocsp
except ImportError:
pass
try:
from cryptography.hazmat.primitives.asymmetric import ed25519, ed448
except ImportError:
pass
try:
from urllib.request import urlopen, Request # Python 3
except ImportError:
from urllib2 import urlopen, Request # Python 2
class InvalidCertificateError(Exception):
pass
# @brief a simple, portable indent function
def indent(text, spaces=0):
ind = ' ' * spaces
return os.linesep.join(ind + line for line in text.splitlines())
# @brief wrapper for log output
def log(msg, exc=None, error=False, warning=False):
if error:
prefix = "Error: "
elif warning:
prefix = "Warning: "
else:
prefix = ""
output = prefix + msg
if exc:
_, exc_value, _ = sys.exc_info()
if not getattr(exc, '__traceback__', None) and exc == exc_value:
# Traceback handling on Python 2 is ugly, so we only output it if the exception is the current sys one
formatted_exc = traceback.format_exc()
else:
formatted_exc = traceback.format_exception(type(exc), exc, getattr(exc, '__traceback__', None))
exc_string = ''.join(formatted_exc) if isinstance(formatted_exc, list) else str(formatted_exc)
output += os.linesep + indent(exc_string, len(prefix))
if error or warning:
sys.stderr.write(output + os.linesep)
sys.stderr.flush() # force flush buffers after message was written for immediate display
else:
sys.stdout.write(output + os.linesep)
sys.stdout.flush() # force flush buffers after message was written for immediate display
# @brief wrapper for downloading an url
def get_url(url, data=None, headers=None):
return urlopen(Request(url, data=data, headers={} if headers is None else headers))
# @brief check whether existing certificate is still valid or expiring soon
# @param cert the certificate object to check
# @param ttl_days the minimum amount of days for which the certificate must be valid
# @return True if certificate is still valid for at least ttl_days, False otherwise
def is_cert_valid(cert, ttl_days):
now = datetime.datetime.now()
if cert.not_valid_before > now:
raise InvalidCertificateError("Certificate seems to be from the future")
expiry_limit = now + datetime.timedelta(days=ttl_days)
if cert.not_valid_after < expiry_limit:
return False
return True
# @brief create a certificate signing request
# @param names list of domain names the certificate should be valid for
# @param key the key to use with the certificate in pyopenssl format
# @param must_staple whether or not the certificate should include the OCSP must-staple flag
# @return the CSR in pyopenssl format
def new_cert_request(names, key, must_staple=False):
primary_name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME,
names[0].decode('utf-8') if getattr(names[0], 'decode', None) else
names[0])])
all_names = x509.SubjectAlternativeName(
[x509.DNSName(name.decode('utf-8') if getattr(name, 'decode', None) else name) for name in names])
req = x509.CertificateSigningRequestBuilder()
req = req.subject_name(primary_name)
req = req.add_extension(all_names, critical=False)
if must_staple:
if getattr(x509, 'TLSFeature', None):
req = req.add_extension(x509.TLSFeature(features=[x509.TLSFeatureType.status_request]), critical=False)
else:
log('OCSP must-staple ignored as current version of cryptography does not support the flag.', warning=True)
req = req.sign(key, hashes.SHA256(), default_backend())
return req
# @brief generate a new account key
# @param path path where the new key file should be written in PEM format (optional)
def new_account_key(path=None, key_algo=None, key_size=None):
return new_ssl_key(path, key_algo, key_size)
# @brief generate a new ssl key
# @param path path where the new key file should be written in PEM format (optional)
def new_ssl_key(path=None, key_algo=None, key_size=None):
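    # Defaults to 4096-bit RSA; also supports EC (P-256/384/521), Ed25519 and Ed448.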
if not key_algo or key_algo.lower() == 'rsa':
if not key_size:
key_size = 4096
key_format = serialization.PrivateFormat.TraditionalOpenSSL
private_key = rsa.generate_private_key(
public_exponent=65537,
key_size=key_size,
backend=default_backend()
)
elif key_algo.lower() == 'ec':
if not key_size or key_size == 256:
key_curve = ec.SECP256R1
elif key_size == 384:
key_curve = ec.SECP384R1
elif key_size == 521:
key_curve = ec.SECP521R1
else:
raise ValueError("Unsupported EC curve size parameter: {}".format(key_size))
key_format = serialization.PrivateFormat.PKCS8
private_key = ec.generate_private_key(curve=key_curve, backend=default_backend())
    elif key_algo.lower() == 'ed25519' and "cryptography.hazmat.primitives.asymmetric.ed25519" in sys.modules:
key_format = serialization.PrivateFormat.PKCS8
private_key = ed25519.Ed25519PrivateKey.generate()
    elif key_algo.lower() == 'ed448' and "cryptography.hazmat.primitives.asymmetric.ed448" in sys.modules:
key_format = serialization.PrivateFormat.PKCS8
private_key = ed448.Ed448PrivateKey.generate()
else:
raise ValueError("Unsupported key algorithm: {}".format(key_algo))
if path is not None:
pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=key_format,
encryption_algorithm=serialization.NoEncryption(),
)
with io.open(path, 'wb') as pem_out:
pem_out.write(pem)
if hasattr(os, 'chmod'):
try:
os.chmod(path, int("0400", 8))
except OSError:
log('Could not set file permissions on {0}!'.format(path), warning=True)
else:
log('Keyfile permission handling unavailable on this platform', warning=True)
return private_key
# @brief read a key from file
# @param path path to file
# @param key indicate whether we are loading a key
# @param csr indicate whether we are loading a csr
# @return the key in pyopenssl format
def read_pem_file(path, key=False, csr=False):
with io.open(path, 'r') as f:
if key:
return serialization.load_pem_private_key(f.read().encode('utf-8'), None, default_backend())
elif csr:
return x509.load_pem_x509_csr(f.read().encode('utf8'), default_backend())
else:
return convert_pem_str_to_cert(f.read())
# @brief write cert data to PEM formatted file
def write_pem_file(crt, path, perms=None):
if hasattr(os, 'chmod') and os.path.exists(path):
try:
os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
except OSError:
log('Could not make file ({0}) writable'.format(path), warning=True)
with io.open(path, "w") as f:
f.write(convert_cert_to_pem_str(crt))
if perms:
if hasattr(os, 'chmod'):
try:
os.chmod(path, perms)
except OSError:
log('Could not set file permissions ({0}) on {1}!'.format(perms, path), warning=True)
else:
log('PEM-File permission handling unavailable on this platform', warning=True)
# @brief download the issuer ca for a given certificate
# @param cert certificate data
# @returns ca certificate data
def download_issuer_ca(cert):
aia = cert.extensions.get_extension_for_oid(ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
ca_issuers = None
for data in aia.value:
if data.access_method == x509.OID_CA_ISSUERS:
ca_issuers = data.access_location.value
break
if not ca_issuers:
log("Could not determine issuer CA for given certificate: {}".format(cert), error=True)
return None
log("Downloading CA certificate from {}".format(ca_issuers))
resp = get_url(ca_issuers)
code = resp.getcode()
if code >= 400:
log("Could not download issuer CA (error {}) for given certificate: {}".format(code, cert), error=True)
return None
return x509.load_der_x509_certificate(resp.read(), default_backend())
# @brief determine all san domains on a given certificate
def get_cert_domains(cert):
san_cert = cert.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
domains = set()
domains.add(cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value)
if san_cert:
for d in san_cert.value:
domains.add(d.value)
# Convert IDNA domain to correct representation and return the list
return [x for x, _ in idna_convert(domains)]
# @brief determine certificate cn
def get_cert_cn(cert):
return "CN={}".format(cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)[0].value)
# @brief determine certificate end of validity
def get_cert_valid_until(cert):
return cert.not_valid_after
# @brief convert certificate to PEM format
# @param cert certificate object or a list thereof
# @return the certificate in PEM format
def convert_cert_to_pem_str(cert):
if not isinstance(cert, list):
cert = [cert]
result = list()
for data in cert:
result.append(data.public_bytes(serialization.Encoding.PEM).decode('utf8'))
return '\n'.join(result)
# @brief load a PEM certificate from str
# @return a certificate object or a list of objects if multiple are in the string
def convert_pem_str_to_cert(certdata):
certs = re.findall(r'(-----BEGIN CERTIFICATE-----\n[^\-]+\n-----END CERTIFICATE-----)',
certdata, re.DOTALL)
result = list()
for data in certs:
result.append(x509.load_pem_x509_certificate(data.encode('utf8'), default_backend()))
return result[0] if len(result) == 1 else result
# @brief serialize cert/csr to DER bytes
def convert_cert_to_der_bytes(data):
return data.public_bytes(serialization.Encoding.DER)
# @brief load a DER certificate from str
def convert_der_bytes_to_cert(data):
return x509.load_der_x509_certificate(data, default_backend())
# @brief determine key signing algorithm and jwk data
# @return key algorithm, signature algorithm, key numbers as a dict
def get_key_alg_and_jwk(key):
if isinstance(key, rsa.RSAPrivateKey):
# See https://tools.ietf.org/html/rfc7518#section-6.3
numbers = key.public_key().public_numbers()
return "RS256", {"kty": "RSA",
"e": bytes_to_base64url(int_to_bytes(numbers.e)),
"n": bytes_to_base64url(int_to_bytes(numbers.n))}
elif isinstance(key, ec.EllipticCurvePrivateKey):
# See https://tools.ietf.org/html/rfc7518#section-6.2
numbers = key.public_key().public_numbers()
if isinstance(numbers.curve, ec.SECP256R1):
alg = 'ES256'
crv = 'P-256'
elif isinstance(numbers.curve, ec.SECP384R1):
alg = 'ES384'
crv = 'P-384'
elif isinstance(numbers.curve, ec.SECP521R1):
alg = 'ES512'
crv = 'P-521'
else:
raise ValueError("Unsupported EC curve in key: {}".format(key))
full_octets = (int(crv[2:]) + 7) // 8
return alg, {"kty": "EC", "crv": crv,
"x": bytes_to_base64url(int_to_bytes(numbers.x, full_octets)),
"y": bytes_to_base64url(int_to_bytes(numbers.y, full_octets))}
elif "cryptography.hazmat.primitives.asymmetric.ed25519" in sys.modules and isinstance(key,
ed25519.Ed25519PrivateKey):
# See https://tools.ietf.org/html/rfc8037#appendix-A.2
return "EdDSA", {"kty": "OKP", "crv": "Ed25519",
"x": bytes_to_base64url(key.public_key().public_bytes(encoding=serialization.Encoding.Raw,
format=serialization.PublicFormat.Raw)
)}
elif "cryptography.hazmat.primitives.asymmetric.ed448" in sys.modules and isinstance(key,
ed448.Ed448PrivateKey):
return "EdDSA", {"kty": "OKP", "crv": "Ed448",
"x": bytes_to_base64url(key.public_key().public_bytes(encoding=serialization.Encoding.Raw,
format=serialization.PublicFormat.Raw)
)}
else:
raise ValueError("Unsupported key: {}".format(key))
# @brief sign string with key
def signature_of_str(key, string):
alg, _ = get_key_alg_and_jwk(key)
data = string.encode('utf8')
if alg == 'RS256':
return key.sign(data, padding.PKCS1v15(), hashes.SHA256())
elif alg.startswith('ES'):
full_octets = (int(alg[2:]) + 7) // 8
if alg == 'ES256':
der_sig = key.sign(data, ec.ECDSA(hashes.SHA256()))
elif alg == 'ES384':
der_sig = key.sign(data, ec.ECDSA(hashes.SHA384()))
elif alg == 'ES512':
der_sig = key.sign(data, ec.ECDSA(hashes.SHA512()))
else:
raise ValueError("Unsupported EC signature algorithm: {}".format(alg))
# convert DER signature to RAW format (https://tools.ietf.org/html/rfc7518#section-3.4)
r, s = decode_dss_signature(der_sig)
return int_to_bytes(r, full_octets) + int_to_bytes(s, full_octets)
elif alg == 'EdDSA':
return key.sign(data)
else:
raise ValueError("Unsupported signature algorithm: {}".format(alg))
# @brief hash a string
def hash_of_str(string):
account_hash = hashes.Hash(hashes.SHA256(), backend=default_backend())
account_hash.update(string.encode('utf8'))
return account_hash.finalize()
# @brief helper function to base64 encode for JSON objects
# @param b the byte-string to encode
# @return the encoded string
def bytes_to_base64url(b):
return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")
# @brief check whether existing target file is still valid or source crt has been updated
# @param target string containing the path to the target file
# @param file string containing the path to the certificate file
# @return True if target file is at least as new as the certificate, False otherwise
def target_is_current(target, file):
if not os.path.isfile(target):
return False
target_date = os.path.getmtime(target)
crt_date = os.path.getmtime(file)
return target_date >= crt_date
# @brief convert domain list to idna representation (if applicable)
def idna_convert(domainlist):
if any(ord(c) >= 128 for c in ''.join(domainlist)):
try:
domaintranslation = list()
for domain in domainlist:
if any(ord(c) >= 128 for c in domain):
# Translate IDNA domain name from a unicode domain (handle wildcards separately)
if domain.startswith('*.'):
idna_domain = "*.{}".format(domain[2:].encode('idna').decode('ascii'))
else:
idna_domain = domain.encode('idna').decode('ascii')
result = idna_domain, domain
else:
result = domain, domain
domaintranslation.append(result)
return domaintranslation
except Exception as e:
log("Unicode domain(s) found but IDNA names could not be translated due to error: {}".format(e), error=True)
return [(x, x) for x in domainlist]
# @brief validate the OCSP status for a given certificate by the given issuer
def is_ocsp_valid(cert, issuer, hash_algo):
if hash_algo == 'sha1':
algorithm = hashes.SHA1
elif hash_algo == 'sha224':
algorithm = hashes.SHA224
elif hash_algo == 'sha256':
algorithm = hashes.SHA256
elif hash_algo == 'sha385':
algorithm = hashes.SHA384
elif hash_algo == 'sha512':
algorithm = hashes.SHA512
else:
log("Invalid hash algorithm '{}' used for OCSP validation. Validation ignored.".format(hash_algo), warning=True)
return True
if isinstance(issuer, list):
issuer = issuer[0] # First certificate in the CA chain is the immediate issuer
try:
ocsp_urls = []
aia = cert.extensions.get_extension_for_oid(ExtensionOID.AUTHORITY_INFORMATION_ACCESS)
for data in aia.value:
if data.access_method == x509.OID_OCSP:
ocsp_urls.append(data.access_location.value)
# This is a bit of a hack due to validation problems within cryptography (TODO: Check if this is still true)
# Correct replacement: ocsprequest = ocsp.OCSPRequestBuilder().add_certificate(cert, issuer, algorithm).build()
ocsprequest = ocsp.OCSPRequestBuilder((cert, issuer, algorithm)).build()
ocsprequestdata = ocsprequest.public_bytes(serialization.Encoding.DER)
for ocsp_url in ocsp_urls:
response = get_url(ocsp_url,
ocsprequestdata,
{
'Accept': 'application/ocsp-response',
'Content-Type': 'application/ocsp-request',
})
ocspresponsedata = response.read()
ocspresponse = ocsp.load_der_ocsp_response(ocspresponsedata)
if ocspresponse.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL \
and ocspresponse.certificate_status == ocsp.OCSPCertStatus.REVOKED:
return False
except Exception as e:
log("An exception occurred during OCSP validation (Validation will be ignored): {}".format(e), error=True)
return True
| davidklaftenegger/acertmgr | acertmgr/tools.py | Python | isc | 18,859 |
#!/usr/bin/env python
# Copyright 2010-2012 RethinkDB, all rights reserved.
import sys, subprocess, os, time, signal
from vcoptparse import *
control_user = "rethinkdb@deadshot"
class VM(object):
def __init__(self, name, uuid, host, control):
self.name = name
self.uuid = uuid
self.host = host
self.control = control
def start(self):
subprocess.Popen(["ssh %s 'VBoxManage startvm --type headless %s'" % (self.control, self.uuid)], shell = True).wait()
start_time = time.time()
while os.system("ssh %s 'true'" % self.host) and time.time() - start_time < 5 * 60: # timeout after 5 minutes
time.sleep(1)
if not os.system("ssh %s 'true'" % self.host):
print "(VM successfully started)"
else:
sys_exit("Error: Failed to connect to VM", -1)
def command(self, cmd, output = False, timeout = 1800):
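        # Run cmd on the VM over ssh, polling until it exits; kill the process
        # and abort the script if it exceeds the timeout or exits non-zero.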
print "Executing on VM:", cmd
proc = subprocess.Popen(["ssh %s '%s'" % (self.host, cmd)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell = True)
start_time = time.time()
while proc.poll() == None and time.time() - start_time < timeout:
if output:
line = proc.stdout.readline()
if line:
print line.strip()
else:
pass
if proc.poll() == None:
proc.send_signal(signal.SIGKILL)
sys_exit("Error: process did not finish within the time limit.", -1)
if proc.poll():
sys_exit("Error: command \"%s\" finished with exit value %d." % (cmd, proc.poll()), proc.poll())
return proc
def shut_down(self, remove_temp = False):
if remove_temp:
self.command("rm -rf /tmp/test.*")
time.sleep(5)
subprocess.Popen(["ssh %s 'VBoxManage controlvm %s poweroff'" % (self.control, self.uuid)], shell = True).wait()
def sys_exit(message, exit_code, shut_down = True):
print message
if shut_down:
target.shut_down()
sys.exit(exit_code)
suse2 = VM('suse2', '2c082dde-eac6-493d-855d-8220a2aae070', '[email protected]', control_user) # this SUSE uses an older version of GCC (where tests happen)
suse = VM('suse', '7bd61095-36c6-4e98-a2c2-4ce6322de5d7', '[email protected]', control_user) # this SUSE uses a new version of GCC (where builds happen)
redhat5_1 = VM('redhat5_1', '32340f79-cea9-42ca-94d5-2da13d408d02', '[email protected]', control_user)
ubuntu = VM('ubuntu', '1f4521a0-6e74-4d20-b4b9-9ffd8e231423', '[email protected]', control_user)
debian = VM('debian', 'cc76e2a5-92c0-4208-be08-5c02429c2c50', '[email protected]', control_user)
centos5_5 = VM('centos5_5', '7595c315-9be0-4e6d-a757-33f018182937', '[email protected]', control_user)
centos6 = VM('centos6', '3f9e9d18-dccb-40b9-ba31-5a68f627b258', '[email protected]', control_user)
vm_list = {"suse2": suse2, "suse": suse, "redhat5_1": redhat5_1, "ubuntu": ubuntu, "debian": debian, "centos5_5": centos5_5, "centos6": centos6}
def help():
print "VM Access:"
print " Runs a command on a remote virtual machine. Starts the virtual machine if necessary, and shuts it down on completion. If the command fails or if the virtual machine is inaccessible, then this script will throw an exception. Before commands are run, the curent directory is compressed and sent over. The command is run in a temporary directory and all its resulting contents are copied back."
print " --help Print this help."
print " --vm-name The target virtual machine to run the command on. Options are:"
print " ", vm_list.keys()
print " --command The command to run on the virtual machine. Either command or shut-down must be specified."
print " --shut-down Use this flag to shut down the specified VM."
o = OptParser()
o["help"] = BoolFlag("--help")
o["vm-name"] = StringFlag("--vm-name", None)
o["command"] = StringFlag("--command", default = None)
o["shut-down"] = BoolFlag("--shut-down")
try:
opts = o.parse(sys.argv)
except OptError:
sys_exit("Argument parsing error", -1, False)
if opts["help"]:
help()
sys_exit("", 0, False)
if not opts["vm-name"]:
sys_exit("Error: must specify a VM name.", -1, False)
if not opts["command"] and not opts["shut-down"]:
sys_exit("Error: must specify a command or call shut-down.", -1, False)
if opts["vm-name"] not in vm_list:
sys_exit("Error: invalid VM name.", -1, False)
target = vm_list[opts["vm-name"]]
if opts["shut-down"]:
target.shut_down(remove_temp = True)
exit(0)
print "Begin: Running command:", opts["command"]
print "\ton VM", target.name
# Start VM
print "***Starting VM..."
target.start()
# Make a temporary directory on VM
print "***Making a temporary directory on the virtual machine..."
proc = target.command("cd /tmp && mktemp -d test.XXXXXXXXXX")
dir_name = "/tmp/" + proc.stdout.readline().strip()
print "***Will be working in directory " + dir_name
# Move files to VM
print "***Transferring files to virtual machine..."
if "RETHINKDB" in os.environ:
print "*****(debug: we are currently running a test)"
subprocess.Popen(" ".join((["cd", os.environ["RETHINKDB"] + "/..", "&&"] if "RETHINKDB" in os.environ else []) + ["tar", "czf", "tmp.tar.gz", "--exclude", "tmp.tar.gz", "*", "&&", "scp", "tmp.tar.gz", "%s:%s/tmp.tar.gz" % (target.host, dir_name)]), shell=True).wait()
target.command(" ".join(["cd", dir_name, "&&", "tar", "xzf", "tmp.tar.gz"]), output=True)
# Execute command
print "***Executing command..."
# modifications
opts["command"] = opts["command"].replace('_HOST', '$HOST').replace('_PORT', '$PORT')
proc = target.command(("cd %s && " % dir_name) + opts["command"], output = True)
# Move files from VM
print "***Transferring files from virtual machine..."
proc = target.command(" ".join(["cd", dir_name, "&&", "rm", "tmp.tar.gz", "&&", "tar", "czf", "tmp.tar.gz", "--exclude", "tmp.tar.gz", "*"]), output = True)
subprocess.Popen(" ".join(["scp", "%s:%s/tmp.tar.gz tmp.tar.gz" % (target.host, dir_name)]), shell = True).wait()
subprocess.Popen(" ".join(["tar", "xzf", "tmp.tar.gz"]), shell = True).wait()
print "***Removing temporary files and shutting down VM..."
target.command("rm -rf %s" % dir_name).wait()
target.shut_down(True)
sys_exit("Done.", 0, False)
| Qinusty/rethinkdb | scripts/VirtuaBuild/vm_access.py | Python | agpl-3.0 | 6,357 |
#!/usr/bin/env python
from numpy import array
from pyspark.mllib.recommendation import ALS
from pyspark import SparkConf, SparkContext
if __name__ == '__main__':
conf = SparkConf().setMaster("local[*]").setAppName("Dating Recommendations")
sc = SparkContext(conf=conf)
# Load and parse the Rating data
data = sc.textFile("~/hadoop-fundamentals/data/dating/ratings.csv")
ratings = data.map(lambda line: array([float(x) for x in line.split(',')]))
# Build the recommendation model using ALS
rank = 100
numIterations = 10
model = ALS.train(ratings, rank, numIterations)
# Evaluate the model on training data
testdata = ratings.map(lambda p: (int(p[0]), int(p[1])))
predictions = model.predictAll(testdata).map(lambda r: ((r[0], r[1]), r[2]))
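    # Key both RDDs by (user, product) so actual and predicted ratings can be joined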
ratesAndPreds = ratings.map(lambda r: ((r[0], r[1]), r[2])).join(predictions)
MSE = ratesAndPreds.map(lambda r: (r[1][0] - r[1][1])**2).reduce(lambda x, y: x + y)/ratesAndPreds.count()
print("Mean Squared Error = " + str(MSE))
| nvoron23/hadoop-fundamentals | recommender/recommendations.py | Python | mit | 1,038 |
import re
from datetime import datetime
from os.path import join
from urllib.parse import urlencode
import dateutil.parser
import magic
import requests
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import transaction
from candidates.models import PartySet
from popolo.models import Organization
emblem_directory = join(settings.BASE_DIR, "data", "party-emblems")
base_emblem_url = (
"http://search.electoralcommission.org.uk/Api/Registrations/Emblems/"
)
def get_descriptions(party):
return [
{"description": d["Description"], "translation": d["Translation"]}
for d in party["PartyDescriptions"]
]
class Command(BaseCommand):
help = "Update parties from a CSV of party data"
def handle(self, **options):
self.mime_type_magic = magic.Magic(mime=True)
self.gb_parties, _ = PartySet.objects.get_or_create(slug="gb")
self.ni_parties, _ = PartySet.objects.get_or_create(slug="ni")
start = 0
per_page = 30
url = (
"http://search.electoralcommission.org.uk/api/search/Registrations"
)
params = {
"rows": per_page,
"et": ["pp", "ppm"],
"register": ["gb", "ni", "none"],
"regStatus": ["registered", "deregistered", "lapsed"],
"period": [
"127",
"135",
"136",
"205",
"207",
"217",
"2508",
"2510",
"2512",
"2514",
"281",
"289",
"301",
"303",
"305",
"3560",
"37",
"38",
"4",
"404",
"410",
"445",
"49",
"60",
"62",
"68",
"74",
],
}
with transaction.atomic():
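            # Page through the EC search API per_page results at a time, inside
            # one transaction, until the reported Total has been consumed.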
total = None
while total is None or start <= total:
params["start"] = start
resp = requests.get(
url + "?" + urlencode(params, doseq=True)
).json()
if total is None:
total = resp["Total"]
self.parse_data(resp["Result"])
start += per_page
def parse_data(self, ec_parties_data):
for ec_party in ec_parties_data:
ec_party_id = ec_party["ECRef"].strip()
# We're only interested in political parties:
if not ec_party_id.startswith("PP"):
continue
party_id = self.clean_id(ec_party_id)
if ec_party["RegulatedEntityTypeName"] == "Minor Party":
register = ec_party["RegisterNameMinorParty"].replace(
" (minor party)", ""
)
else:
register = ec_party["RegisterName"]
party_name, party_dissolved = self.clean_name(
ec_party["RegulatedEntityName"]
)
party_founded = self.clean_date(ec_party["ApprovedDate"])
# Does this party already exist? If not, create a new one.
try:
party = Organization.objects.get(slug=party_id)
print("Got the existing party:", party.name)
except Organization.DoesNotExist:
party = Organization.objects.create(
name=party_name, slug=party_id
)
print(
"Couldn't find {}, creating a new party {}".format(
party_id, party_name
)
)
party.name = party_name
party.classification = "Party"
party.founding_date = party_founded
party.end_date = party_dissolved
party.register = register
{
"Great Britain": self.gb_parties,
"Northern Ireland": self.ni_parties,
}[register].parties.add(party)
party.identifiers.update_or_create(
scheme="electoral-commission",
defaults={"identifier": ec_party_id},
)
party.other_names.filter(note="registered-description").delete()
for d in get_descriptions(ec_party):
value = d["description"]
translation = d["translation"]
if translation:
value = "{} | {}".format(value, translation)
party.other_names.create(
name=value, note="registered-description"
)
party.save()
def clean_date(self, date):
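        # The EC API serialises dates as "/Date(<milliseconds since epoch>)/"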
timestamp = re.match(r"\/Date\((\d+)\)\/", date).group(1)
dt = datetime.fromtimestamp(int(timestamp) / 1000.0)
return dt.strftime("%Y-%m-%d")
def clean_name(self, name):
name = name.strip()
if "de-registered" not in name.lower():
return name, "9999-12-31"
match = re.match(r"(.+)\[De-registered ([0-9]+/[0-9]+/[0-9]+)\]", name)
name, deregistered_date = match.groups()
name = re.sub(r"\([Dd]e-?registered [^\)]+\)", "", name)
deregistered_date = dateutil.parser.parse(
deregistered_date, dayfirst=True
).strftime("%Y-%m-%d")
return name.strip(), deregistered_date
def clean_id(self, party_id):
party_id = re.sub(r"^PPm?\s*", "", party_id).strip()
return "party:{}".format(party_id)
| DemocracyClub/yournextrepresentative | ynr/apps/elections/uk/management/commands/uk_update_parties_from_ec_data.py | Python | agpl-3.0 | 5,626 |
#!~/Documents/pysam_consensus/venv/bin/python3
import argparse
from Bio import SeqIO
def message(case, variables=None):
if case == "duplicate_accessions":
        to_add, seq_list_file = variables
message="""
    %s is a duplicate accession in
file : %s
please remove duplicate accession.\n\n""" %( to_add, seq_list_file)
elif case == "unequal_length" :
rec, fasta = variables
message = """
Check to make sure the file is aligned
%s does not have the same length as
other sequences in the alignment
file: %s
.
\n""" %( rec , fasta)
elif case == "help":
message = """
fasta_ghost.py is to prepare a series of fasta files for
multiple alignments. It takes a list file, one or more
fasta files and an outprefix. It reads each fasta and
if a fasta is missing an accession a new blank accession
of the appropriate length is added.
\n"""
return message
def get_seq_list(seq_list_file):
f=open(seq_list_file, 'r')
seq_split = f.read().split("\n")
seq_list = []
for accession in seq_split:
to_add = accession.strip()
if to_add != '':
assert (to_add not in seq_list ), message("duplicate_accessions", [to_add, seq_list_file])
seq_list.append(to_add)
f.close()
return (seq_list)
def get_seqs(fasta_file):
"""
Function get_seqs() reads entire fasta into memory.
If the sequences are very large and memory is limited
this could be a problem.
"""
seq_dict = {}
prev_length = None
for rec in SeqIO.parse(fasta_file, 'fasta'):
name = rec.id
length = len(rec.seq)
print(name, len(rec.seq))
assert ( name not in seq_dict.keys() ), message("duplicate_accessions", [name, fasta_file])
seq_dict[name] = rec
if prev_length is not None:
            assert (length == prev_length ), message("unequal_length",[rec.id, fasta_file])
prev_length = length
return [length, seq_dict ]
def include_ghost_seqs( seq_list, seqs, length, out):
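    # Write every accession in seq_list in order; accessions missing from the
    # alignment get an all-gap ("-") placeholder sequence of the same length.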
f=open(out, "w")
f.close()
f=open(out, "a")
for seq in seq_list :
if seq not in seqs.keys():
print(">%s\n%s" %( seq, "".join(["-"]*length)))
f.write(">%s\n%s\n" %( seq, "".join(["-"]*length)))
else:
print( ">%s\n%s" %( seq, str( seqs[seq].seq)))
f.write(">%s\n%s\n" %( seq, str( seqs[seq].seq)))
f.close()
if __name__ =="__main__":
parser = argparse.ArgumentParser(description=message("help"))
parser.add_argument('-f', '--fasta',
nargs='+',
help="space delimited list of aligned fasta files",
type=str,
dest="fasta_files")
parser.add_argument('-l', '--list',
help="file containg lists of all accession names needed in the output fasta.",
type=str,
dest='seq_list_file')
parser.add_argument('-o', '--out',
help='out prefix for fasta file[s] in list.',
type=str,
dest='out',
default="ghost_seqs")
argv = parser.parse_args()
seq_list = get_seq_list(argv.seq_list_file)
print( argv.fasta_files)
for fasta in argv.fasta_files :
out = "%s%s" %(argv.out , fasta.split("/")[-1])
length, seq_dict = get_seqs(fasta)
include_ghost_seqs( seq_list , seq_dict, length, out)
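# Hypothetical command-line usage (illustration only; file names are assumed):
#   python fasta_ghost.py -l accessions.txt -f aln1.fasta aln2.fasta -o ghost_
# would write ghost_aln1.fasta and ghost_aln2.fasta, each padded with gap-only
# records for any accession from accessions.txt missing from that alignment.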
| NDHall/pysam_tools | fasta_ghost/fasta_ghost.py | Python | mit | 3,564 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from urlparse import urlparse
from django.forms import CheckboxSelectMultiple, MultiWidget, URLField
from django.forms import ValidationError
from django.forms.widgets import DateInput, Input, RadioSelect
from django.utils.safestring import mark_safe
import requests
from tower import ugettext as _
class CalendarInput(DateInput):
    def render(self, name, value, attrs=None):
        attrs = attrs or {}
        if 'class' not in attrs:
attrs['class'] = 'datepicker'
return super(CalendarInput, self).render(name, value, attrs)
class DateRangeWidget(MultiWidget):
def __init__(self, attrs=None):
widgets = (CalendarInput(attrs=attrs), CalendarInput(attrs=attrs))
super(DateRangeWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if value:
return [value.start, value.stop]
return [None, None]
def format_output(self, rendered_widgets):
return '-'.join(rendered_widgets)
class HorizCheckboxSelect(CheckboxSelectMultiple):
def render(self, *args, **kwargs):
output = super(HorizCheckboxSelect, self).render(*args, **kwargs)
return mark_safe(output.replace(u'<ul>', u'').replace(u'</ul>', u'').replace(u'<li>', u'').replace(u'</li>', u''))
class HorizRadioRenderer(RadioSelect.renderer):
""" this overrides widget method to put radio buttons horizontally
instead of vertically.
"""
def render(self):
"""Outputs radios"""
return mark_safe(u'\n'.join([u'%s\n' % w for w in self]))
class HorizRadioSelect(RadioSelect):
renderer = HorizRadioRenderer
class RangeInput(Input):
input_type = 'range'
def render(self, name, value, attrs=None):
markup = """
<div class="range-container">
<div class="range-row">
{input} <span class="range-label"></span>
</div>
<div class="steps">
<span style="left: 0%">0</span>
<span style="left: 25%">15</span>
<span style="left: 50%">30</span>
<span style="left: 75%">45</span>
<span style="left: 100%">60</span>
</div>
</div>
""".format(input=super(RangeInput, self).render(name, value, attrs))
return mark_safe(markup)
class MyURLField(URLField):
def clean(self, value):
if not value:
return None
url = urlparse(value)
if url.scheme == '':
value = 'http://' + value
try:
r = requests.get(value, timeout=10, verify=False)
if not r.ok:
raise ValidationError(_('The website is not reachable. Please enter a valid url.'))
except requests.exceptions.RequestException:
raise ValidationError(_('The website is not reachable. Please enter a valid url.'))
return super(MyURLField, self).clean(value)
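# Hypothetical illustration (not part of the original module): MyURLField
# prepends a scheme to bare hostnames before checking reachability, e.g.
#   MyURLField().clean('example.com')  # -> 'http://example.com' if reachable
# and raises ValidationError when the site cannot be fetched.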
| adini121/oneanddone | oneanddone/base/widgets.py | Python | mpl-2.0 | 3,122 |
issues = [
    dict(name='Habit', number=5, season='Winter 2012',
         description='commit to a change, experience it, and record'),
    dict(name='Interview', number=4, season='Autumn 2011',
         description="this is your opportunity to inhabit another's mind"),
    dict(name='Digital Presence', number=3, season='Summer 2011',
         description='what does your digital self look like?'),
    dict(name='Adventure', number=2, season='Spring 2011',
         description='take an adventure and write about it.'),
    dict(name='Unplugging', number=1, season='Winter 2011',
         description='what are you looking forward to leaving?')
]
siteroot = '/Users/adam/open review quarterly/source/'
infodir = '/Users/adam/open review quarterly/info'
skip_issues_before = 5
illustration_tag = '=== Illustration ==='
illustration_tag_sized = "=== Illustration width: 50% ==="
# -*- coding: utf-8 -*-
#
# testpy documentation build configuration file, created by
# sphinx-quickstart on Tue Sep 16 10:15:21 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'testpy'
copyright = u'2014, Kevin Qu <[email protected]>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'testpydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'testpy.tex', u'testpy Documentation',
u'Kevin Qu \\textless{}[email protected]\\textgreater{}', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'testpy', u'testpy Documentation',
[u'Kevin Qu <[email protected]>'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'testpy', u'testpy Documentation',
u'Kevin Qu <[email protected]>', 'testpy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| quchunguang/test | testpy/conf.py | Python | mit | 8,262 |
# encoding: utf-8
from __future__ import absolute_import, unicode_literals
import os
import shutil
import tempfile
import unittest
import glib
from mopidy import compat, exceptions
from mopidy.internal import path
import tests
class GetOrCreateDirTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.parent = tempfile.mkdtemp()
def tearDown(self): # noqa: N802
if os.path.isdir(self.parent):
shutil.rmtree(self.parent)
def test_creating_dir(self):
dir_path = os.path.join(self.parent, b'test')
self.assert_(not os.path.exists(dir_path))
created = path.get_or_create_dir(dir_path)
self.assert_(os.path.exists(dir_path))
self.assert_(os.path.isdir(dir_path))
self.assertEqual(created, dir_path)
def test_creating_nested_dirs(self):
level2_dir = os.path.join(self.parent, b'test')
level3_dir = os.path.join(self.parent, b'test', b'test')
self.assert_(not os.path.exists(level2_dir))
self.assert_(not os.path.exists(level3_dir))
created = path.get_or_create_dir(level3_dir)
self.assert_(os.path.exists(level2_dir))
self.assert_(os.path.isdir(level2_dir))
self.assert_(os.path.exists(level3_dir))
self.assert_(os.path.isdir(level3_dir))
self.assertEqual(created, level3_dir)
def test_creating_existing_dir(self):
created = path.get_or_create_dir(self.parent)
self.assert_(os.path.exists(self.parent))
self.assert_(os.path.isdir(self.parent))
self.assertEqual(created, self.parent)
def test_create_dir_with_name_of_existing_file_throws_oserror(self):
conflicting_file = os.path.join(self.parent, b'test')
open(conflicting_file, 'w').close()
dir_path = os.path.join(self.parent, b'test')
with self.assertRaises(OSError):
path.get_or_create_dir(dir_path)
def test_create_dir_with_unicode(self):
with self.assertRaises(ValueError):
dir_path = compat.text_type(os.path.join(self.parent, b'test'))
path.get_or_create_dir(dir_path)
def test_create_dir_with_none(self):
with self.assertRaises(ValueError):
path.get_or_create_dir(None)
class GetOrCreateFileTest(unittest.TestCase):
def setUp(self): # noqa: N802
self.parent = tempfile.mkdtemp()
def tearDown(self): # noqa: N802
if os.path.isdir(self.parent):
shutil.rmtree(self.parent)
def test_creating_file(self):
file_path = os.path.join(self.parent, b'test')
self.assert_(not os.path.exists(file_path))
created = path.get_or_create_file(file_path)
self.assert_(os.path.exists(file_path))
self.assert_(os.path.isfile(file_path))
self.assertEqual(created, file_path)
def test_creating_nested_file(self):
level2_dir = os.path.join(self.parent, b'test')
file_path = os.path.join(self.parent, b'test', b'test')
self.assert_(not os.path.exists(level2_dir))
self.assert_(not os.path.exists(file_path))
created = path.get_or_create_file(file_path)
self.assert_(os.path.exists(level2_dir))
self.assert_(os.path.isdir(level2_dir))
self.assert_(os.path.exists(file_path))
self.assert_(os.path.isfile(file_path))
self.assertEqual(created, file_path)
def test_creating_existing_file(self):
file_path = os.path.join(self.parent, b'test')
path.get_or_create_file(file_path)
created = path.get_or_create_file(file_path)
self.assert_(os.path.exists(file_path))
self.assert_(os.path.isfile(file_path))
self.assertEqual(created, file_path)
def test_create_file_with_name_of_existing_dir_throws_ioerror(self):
conflicting_dir = os.path.join(self.parent)
with self.assertRaises(IOError):
path.get_or_create_file(conflicting_dir)
def test_create_dir_with_unicode_filename_throws_value_error(self):
with self.assertRaises(ValueError):
file_path = compat.text_type(os.path.join(self.parent, b'test'))
path.get_or_create_file(file_path)
def test_create_file_with_none_filename_throws_value_error(self):
with self.assertRaises(ValueError):
path.get_or_create_file(None)
def test_create_dir_without_mkdir(self):
file_path = os.path.join(self.parent, b'foo', b'bar')
with self.assertRaises(IOError):
path.get_or_create_file(file_path, mkdir=False)
def test_create_dir_with_bytes_content(self):
file_path = os.path.join(self.parent, b'test')
created = path.get_or_create_file(file_path, content=b'foobar')
with open(created) as fh:
self.assertEqual(fh.read(), b'foobar')
def test_create_dir_with_unicode_content(self):
file_path = os.path.join(self.parent, b'test')
created = path.get_or_create_file(file_path, content='foobaræøå')
with open(created) as fh:
self.assertEqual(fh.read(), b'foobaræøå')
class PathToFileURITest(unittest.TestCase):
def test_simple_path(self):
result = path.path_to_uri('/etc/fstab')
self.assertEqual(result, 'file:///etc/fstab')
def test_space_in_path(self):
result = path.path_to_uri('/tmp/test this')
self.assertEqual(result, 'file:///tmp/test%20this')
def test_unicode_in_path(self):
result = path.path_to_uri('/tmp/æøå')
self.assertEqual(result, 'file:///tmp/%C3%A6%C3%B8%C3%A5')
def test_utf8_in_path(self):
result = path.path_to_uri('/tmp/æøå'.encode('utf-8'))
self.assertEqual(result, 'file:///tmp/%C3%A6%C3%B8%C3%A5')
def test_latin1_in_path(self):
result = path.path_to_uri('/tmp/æøå'.encode('latin-1'))
self.assertEqual(result, 'file:///tmp/%E6%F8%E5')
class UriToPathTest(unittest.TestCase):
def test_simple_uri(self):
result = path.uri_to_path('file:///etc/fstab')
self.assertEqual(result, '/etc/fstab'.encode('utf-8'))
def test_space_in_uri(self):
result = path.uri_to_path('file:///tmp/test%20this')
self.assertEqual(result, '/tmp/test this'.encode('utf-8'))
def test_unicode_in_uri(self):
result = path.uri_to_path('file:///tmp/%C3%A6%C3%B8%C3%A5')
self.assertEqual(result, '/tmp/æøå'.encode('utf-8'))
def test_latin1_in_uri(self):
result = path.uri_to_path('file:///tmp/%E6%F8%E5')
self.assertEqual(result, '/tmp/æøå'.encode('latin-1'))
class SplitPathTest(unittest.TestCase):
def test_empty_path(self):
self.assertEqual([], path.split_path(''))
def test_single_dir(self):
self.assertEqual(['foo'], path.split_path('foo'))
def test_dirs(self):
self.assertEqual(['foo', 'bar', 'baz'], path.split_path('foo/bar/baz'))
def test_initial_slash_is_ignored(self):
self.assertEqual(
['foo', 'bar', 'baz'], path.split_path('/foo/bar/baz'))
def test_only_slash(self):
self.assertEqual([], path.split_path('/'))
class ExpandPathTest(unittest.TestCase):
# TODO: test via mocks?
def test_empty_path(self):
self.assertEqual(os.path.abspath(b'.'), path.expand_path(b''))
def test_absolute_path(self):
self.assertEqual(b'/tmp/foo', path.expand_path(b'/tmp/foo'))
def test_home_dir_expansion(self):
self.assertEqual(
os.path.expanduser(b'~/foo'), path.expand_path(b'~/foo'))
def test_abspath(self):
self.assertEqual(os.path.abspath(b'./foo'), path.expand_path(b'./foo'))
    def test_xdg_substitution(self):
self.assertEqual(
glib.get_user_data_dir() + b'/foo',
path.expand_path(b'$XDG_DATA_DIR/foo'))
    def test_xdg_substitution_unknown(self):
self.assertIsNone(
path.expand_path(b'/tmp/$XDG_INVALID_DIR/foo'))
class FindMTimesTest(unittest.TestCase):
maxDiff = None
def setUp(self): # noqa: N802
self.tmpdir = tempfile.mkdtemp(b'.mopidy-tests')
def tearDown(self): # noqa: N802
shutil.rmtree(self.tmpdir, ignore_errors=True)
def mkdir(self, *args):
name = os.path.join(self.tmpdir, *[bytes(a) for a in args])
os.mkdir(name)
return name
def touch(self, *args):
name = os.path.join(self.tmpdir, *[bytes(a) for a in args])
open(name, 'w').close()
return name
def test_names_are_bytestrings(self):
"""We shouldn't be mixing in unicode for paths."""
result, errors = path.find_mtimes(tests.path_to_data_dir(''))
for name in result.keys() + errors.keys():
self.assertEqual(name, tests.IsA(bytes))
def test_nonexistent_dir(self):
"""Non existent search roots are an error"""
missing = os.path.join(self.tmpdir, 'does-not-exist')
result, errors = path.find_mtimes(missing)
self.assertEqual(result, {})
self.assertEqual(errors, {missing: tests.IsA(exceptions.FindError)})
def test_empty_dir(self):
"""Empty directories should not show up in results"""
self.mkdir('empty')
result, errors = path.find_mtimes(self.tmpdir)
self.assertEqual(result, {})
self.assertEqual(errors, {})
def test_file_as_the_root(self):
"""Specifying a file as the root should just return the file"""
single = self.touch('single')
result, errors = path.find_mtimes(single)
self.assertEqual(result, {single: tests.any_int})
self.assertEqual(errors, {})
def test_nested_directories(self):
"""Searching nested directories should find all files"""
# Setup foo/bar and baz directories
self.mkdir('foo')
self.mkdir('foo', 'bar')
self.mkdir('baz')
# Touch foo/file foo/bar/file and baz/file
foo_file = self.touch('foo', 'file')
foo_bar_file = self.touch('foo', 'bar', 'file')
baz_file = self.touch('baz', 'file')
result, errors = path.find_mtimes(self.tmpdir)
self.assertEqual(result, {foo_file: tests.any_int,
foo_bar_file: tests.any_int,
baz_file: tests.any_int})
self.assertEqual(errors, {})
def test_missing_permission_to_file(self):
"""Missing permissions to a file is not a search error"""
target = self.touch('no-permission')
os.chmod(target, 0)
result, errors = path.find_mtimes(self.tmpdir)
self.assertEqual({target: tests.any_int}, result)
self.assertEqual({}, errors)
def test_missing_permission_to_directory(self):
"""Missing permissions to a directory is an error"""
directory = self.mkdir('no-permission')
os.chmod(directory, 0)
result, errors = path.find_mtimes(self.tmpdir)
self.assertEqual({}, result)
self.assertEqual({directory: tests.IsA(exceptions.FindError)}, errors)
def test_symlinks_are_ignored(self):
"""By default symlinks should be treated as an error"""
target = self.touch('target')
link = os.path.join(self.tmpdir, 'link')
os.symlink(target, link)
result, errors = path.find_mtimes(self.tmpdir)
self.assertEqual(result, {target: tests.any_int})
self.assertEqual(errors, {link: tests.IsA(exceptions.FindError)})
def test_symlink_to_file_as_root_is_followed(self):
"""Passing a symlink as the root should be followed when follow=True"""
target = self.touch('target')
link = os.path.join(self.tmpdir, 'link')
os.symlink(target, link)
result, errors = path.find_mtimes(link, follow=True)
self.assertEqual({link: tests.any_int}, result)
self.assertEqual({}, errors)
def test_symlink_to_directory_is_followed(self):
pass
def test_symlink_pointing_at_itself_fails(self):
"""Symlink pointing at itself should give as an OS error"""
link = os.path.join(self.tmpdir, 'link')
os.symlink(link, link)
result, errors = path.find_mtimes(link, follow=True)
self.assertEqual({}, result)
self.assertEqual({link: tests.IsA(exceptions.FindError)}, errors)
def test_symlink_pointing_at_parent_fails(self):
"""We should detect a loop via the parent and give up on the branch"""
os.symlink(self.tmpdir, os.path.join(self.tmpdir, 'link'))
result, errors = path.find_mtimes(self.tmpdir, follow=True)
self.assertEqual({}, result)
self.assertEqual(1, len(errors))
self.assertEqual(tests.IsA(Exception), errors.values()[0])
def test_indirect_symlink_loop(self):
"""More indirect loops should also be detected"""
# Setup tmpdir/directory/loop where loop points to tmpdir
directory = os.path.join(self.tmpdir, b'directory')
loop = os.path.join(directory, b'loop')
os.mkdir(directory)
os.symlink(self.tmpdir, loop)
result, errors = path.find_mtimes(self.tmpdir, follow=True)
self.assertEqual({}, result)
self.assertEqual({loop: tests.IsA(Exception)}, errors)
def test_symlink_branches_are_not_excluded(self):
"""Using symlinks to make a file show up multiple times should work"""
self.mkdir('directory')
target = self.touch('directory', 'target')
link1 = os.path.join(self.tmpdir, b'link1')
link2 = os.path.join(self.tmpdir, b'link2')
os.symlink(target, link1)
os.symlink(target, link2)
expected = {target: tests.any_int,
link1: tests.any_int,
link2: tests.any_int}
result, errors = path.find_mtimes(self.tmpdir, follow=True)
self.assertEqual(expected, result)
self.assertEqual({}, errors)
def test_gives_mtime_in_milliseconds(self):
fname = self.touch('foobar')
os.utime(fname, (1, 3.14159265))
result, errors = path.find_mtimes(fname)
self.assertEqual(len(result), 1)
mtime, = result.values()
self.assertEqual(mtime, 3141)
self.assertEqual(errors, {})
# TODO: kill this in favour of just os.path.getmtime + mocks
class MtimeTest(unittest.TestCase):
def tearDown(self): # noqa: N802
path.mtime.undo_fake()
def test_mtime_of_current_dir(self):
mtime_dir = int(os.stat('.').st_mtime)
self.assertEqual(mtime_dir, path.mtime('.'))
def test_fake_time_is_returned(self):
path.mtime.set_fake_time(123456)
self.assertEqual(path.mtime('.'), 123456)
| diandiankan/mopidy | tests/internal/test_path.py | Python | apache-2.0 | 14,675 |
"""
This module contains functionality for navigation/browsing through text without selecting.
"""
from . import commands
from logging import debug
from .contract import pre, post
def movehalfpagedown(doc):
"""Move half a page down."""
width, height = doc.ui.viewport_size
offset = doc.ui.viewport_offset
new_offset = move_n_wrapped_lines_down(doc.text, width, offset, int(height / 2))
doc.ui.viewport_offset = new_offset
commands.movehalfpagedown = movehalfpagedown
def movehalfpageup(doc):
"""Move half a page down."""
width, height = doc.ui.viewport_size
offset = doc.ui.viewport_offset
new_offset = move_n_wrapped_lines_up(doc.text, width, offset, int(height / 2))
doc.ui.viewport_offset = new_offset
commands.movehalfpageup = movehalfpageup
def movepagedown(doc):
"""Move a page down."""
width, height = doc.ui.viewport_size
offset = doc.ui.viewport_offset
new_offset = move_n_wrapped_lines_down(doc.text, width, offset, height)
doc.ui.viewport_offset = new_offset
commands.movepagedown = movepagedown
def movepageup(doc):
"""Move a page up."""
width, height = doc.ui.viewport_size
offset = doc.ui.viewport_offset
new_offset = move_n_wrapped_lines_up(doc.text, width, offset, height)
doc.ui.viewport_offset = new_offset
commands.movepageup = movepageup
def center_around_selection(doc):
"""Center offset around last interval of selection."""
width, height = doc.ui.viewport_size
debug('Viewport height: {}, {}'.format(height, doc.selection[-1][1]))
nr_lines = move_n_wrapped_lines_up(
doc.text, width, doc.selection[-1][1], int(height / 2))
doc.ui.viewport_offset = nr_lines
def count_newlines(text, interval):
beg, end = interval
return text.count('\n', beg, end)
def move_n_wrapped_lines_up_pre(text, max_line_width, start, n):
assert max_line_width > 0
assert 0 <= start < len(text)
assert n >= 0
def move_n_wrapped_lines_up_post(result, text, max_line_width, start, n):
nr_eols = count_newlines(text, (result, start))
assert nr_eols <= n
assert result == 0 or nr_eols == n
@pre(move_n_wrapped_lines_up_pre)
@post(move_n_wrapped_lines_up_post)
def move_n_wrapped_lines_up(text, max_line_width, start, n):
"""
Return the first position in the line which ends with the nth
wrapped end-of-line counting back from start (exclusive).
In other words, return position right after the (n+1)th wrapped end-of-line,
counting back from position start (exclusive).
If there are less than n+1 wrapped end-of-lines before start, return 0.
The reason that we do not return the position of the wrapped end-of-line itself,
is because the virtual eols that emerge from the wrapping do not correspond to
a character in the text and thus do not have a position.
"""
position = start
while 1:
# Note that for rfind, the end parameter is exclusive
previousline = text.rfind('\n', 0, position)
if previousline <= 0:
return 0
n -= int((position - previousline) / max_line_width) + 1
if n <= 0:
return position + 1
position = previousline
def move_n_wrapped_lines_down_pre(text, max_line_width, start, n):
assert max_line_width > 0
assert 0 <= start < len(text)
assert n >= 0
def move_n_wrapped_lines_down_post(result, text, max_line_width, start, n):
nr_eols = count_newlines(text, (start, result))
assert nr_eols <= n
assert result == len(text) or nr_eols == n
@pre(move_n_wrapped_lines_down_pre)
@post(move_n_wrapped_lines_down_post)
def move_n_wrapped_lines_down(text: str, max_line_width: int, start: int, n: int):
"""
Return position right after the nth wrapped end-of-line,
counting from position start (inclusive).
    If there are fewer than n wrapped end-of-lines after start,
    return the eof position.
    So if the nth wrapped end-of-line would be an actual eol and happened to be
    the last character in the file, return the eof position.
The reason that we do not return the position of the wrapped end-of-line itself,
is because the virtual eols that emerge from the wrapping do not correspond to
a character in the text and thus do not have a position.
"""
position = start
eof = len(text)
while 1:
eol = text.find('\n', position)
if eol == -1 or eol == eof - 1:
return eof
nextline = eol + 1
n -= int((nextline - position) / max_line_width) + 1
if n <= 0:
return position + 1
position = nextline
def coord_to_position(line, column, text, crop=False):
pos = 0
while line > 1: # line numbers start with 1
eol = text.find('\n', pos)
if eol == -1:
if crop:
return len(text) - 1
raise ValueError('Line number reaches beyond text.')
pos = eol + 1
line -= 1
pos += column - 1 # column numbers start with 1
if pos >= len(text) and not crop:
raise ValueError('Column number reaches beyond text.')
pos = min(pos, len(text) - 1)
#assert (line, column) == position_to_coord(pos, text)
return pos
def position_to_coord(pos, text):
if pos >= len(text):
raise ValueError('Position reaches beyond text.')
i = 0 # First character of current line
line = 1 # Line numbers start with 1
while i < pos:
eol = text.find('\n', i)
if eol >= pos:
break
else:
line += 1
i = eol + 1
column = pos - i + 1 # Column numbers start with 1
assert pos == coord_to_position(line, column, text)
return line, column
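# Hypothetical round-trip illustration (not in the original module), using
# 1-based line/column numbers as documented above:
#   text = 'abc\ndef\n'
#   coord_to_position(2, 1, text)  # -> 4, the position of 'd'
#   position_to_coord(4, text)     # -> (2, 1)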
def is_position_visible(doc, pos):
"""Determince whether position is visible on screen."""
beg = doc.ui.viewport_offset
width, height = doc.ui.viewport_size
end = move_n_wrapped_lines_down(doc.text, width, beg, height)
return beg <= pos < end
| Chiel92/fate | fate/navigation.py | Python | mit | 6,003 |
"""
Views related to the video upload feature
"""
from boto import s3
import csv
from uuid import uuid4
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseNotFound
from django.utils.translation import ugettext as _, ugettext_noop
from django.views.decorators.http import require_GET, require_http_methods
import rfc6266
from edxval.api import create_video, get_videos_for_ids, SortDirection, VideoSortField
from opaque_keys.edx.keys import CourseKey
from contentstore.models import VideoUploadConfig
from contentstore.utils import reverse_course_url
from edxmako.shortcuts import render_to_response
from util.json_request import expect_json, JsonResponse
from xmodule.assetstore import AssetMetadata
from xmodule.modulestore.django import modulestore
from .course import get_course_and_check_access
__all__ = ["videos_handler", "video_encodings_download"]
# String constant used in asset keys to identify video assets.
VIDEO_ASSET_TYPE = "video"
# Default expiration, in seconds, of one-time URLs used for uploading videos.
KEY_EXPIRATION_IN_SECONDS = 86400
class StatusDisplayStrings(object):
"""
A class to map status strings as stored in VAL to display strings for the
video upload page
"""
# Translators: This is the status of an active video upload
_UPLOADING = ugettext_noop("Uploading")
# Translators: This is the status for a video that the servers are currently processing
_IN_PROGRESS = ugettext_noop("In Progress")
# Translators: This is the status for a video that the servers have successfully processed
_COMPLETE = ugettext_noop("Ready")
# Translators: This is the status for a video that the servers have failed to process
_FAILED = ugettext_noop("Failed")
# Translators: This is the status for a video for which an invalid
# processing token was provided in the course settings
_INVALID_TOKEN = ugettext_noop("Invalid Token")
# Translators: This is the status for a video that is in an unknown state
_UNKNOWN = ugettext_noop("Unknown")
_STATUS_MAP = {
"upload": _UPLOADING,
"ingest": _IN_PROGRESS,
"transcode_queue": _IN_PROGRESS,
"transcode_active": _IN_PROGRESS,
"file_delivered": _COMPLETE,
"file_complete": _COMPLETE,
"file_corrupt": _FAILED,
"pipeline_error": _FAILED,
"invalid_token": _INVALID_TOKEN
}
@staticmethod
def get(val_status):
"""Map a VAL status string to a localized display string"""
return _(StatusDisplayStrings._STATUS_MAP.get(val_status, StatusDisplayStrings._UNKNOWN))
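    # Hypothetical illustration (not in the original source): several VAL
    # statuses collapse onto one display string, and unknown values fall back
    # to "Unknown", e.g.
    #   StatusDisplayStrings.get("transcode_queue")  # -> _("In Progress")
    #   StatusDisplayStrings.get("bogus-status")     # -> _("Unknown")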
@expect_json
@login_required
@require_http_methods(("GET", "POST"))
def videos_handler(request, course_key_string):
"""
The restful handler for video uploads.
GET
html: return an HTML page to display previous video uploads and allow
new ones
json: return json representing the videos that have been uploaded and
their statuses
POST
json: create a new video upload; the actual files should not be provided
to this endpoint but rather PUT to the respective upload_url values
contained in the response
"""
course = _get_and_validate_course(course_key_string, request.user)
if not course:
return HttpResponseNotFound()
if request.method == "GET":
if "application/json" in request.META.get("HTTP_ACCEPT", ""):
return videos_index_json(course)
else:
return videos_index_html(course)
else:
return videos_post(course, request)
@login_required
@require_GET
def video_encodings_download(request, course_key_string):
"""
Returns a CSV report containing the encoded video URLs for video uploads
in the following format:
Video ID,Name,Status,Profile1 URL,Profile2 URL
aaaaaaaa-aaaa-4aaa-aaaa-aaaaaaaaaaaa,video.mp4,Complete,http://example.com/prof1.mp4,http://example.com/prof2.mp4
"""
course = _get_and_validate_course(course_key_string, request.user)
if not course:
return HttpResponseNotFound()
def get_profile_header(profile):
"""Returns the column header string for the given profile's URLs"""
# Translators: This is the header for a CSV file column
# containing URLs for video encodings for the named profile
# (e.g. desktop, mobile high quality, mobile low quality)
return _("{profile_name} URL").format(profile_name=profile)
profile_whitelist = VideoUploadConfig.get_profile_whitelist()
videos = list(_get_videos(course))
name_col = _("Name")
duration_col = _("Duration")
added_col = _("Date Added")
video_id_col = _("Video ID")
status_col = _("Status")
profile_cols = [get_profile_header(profile) for profile in profile_whitelist]
def make_csv_dict(video):
"""
Makes a dictionary suitable for writing CSV output. This involves
extracting the required items from the original video dict and
converting all keys and values to UTF-8 encoded string objects,
because the CSV module doesn't play well with unicode objects.
"""
# Translators: This is listed as the duration for a video that has not
# yet reached the point in its processing by the servers where its
# duration is determined.
duration_val = str(video["duration"]) if video["duration"] > 0 else _("Pending")
ret = dict(
[
(name_col, video["client_video_id"]),
(duration_col, duration_val),
(added_col, video["created"].isoformat()),
(video_id_col, video["edx_video_id"]),
(status_col, video["status"]),
] +
[
(get_profile_header(encoded_video["profile"]), encoded_video["url"])
for encoded_video in video["encoded_videos"]
if encoded_video["profile"] in profile_whitelist
]
)
return {
key.encode("utf-8"): value.encode("utf-8")
for key, value in ret.items()
}
response = HttpResponse(content_type="text/csv")
# Translators: This is the suggested filename when downloading the URL
# listing for videos uploaded through Studio
filename = _("{course}_video_urls").format(course=course.id.course)
# See https://tools.ietf.org/html/rfc6266#appendix-D
response["Content-Disposition"] = rfc6266.build_header(
filename + ".csv",
filename_compat="video_urls.csv"
)
writer = csv.DictWriter(
response,
[
col_name.encode("utf-8")
for col_name
in [name_col, duration_col, added_col, video_id_col, status_col] + profile_cols
],
dialect=csv.excel
)
writer.writeheader()
for video in videos:
writer.writerow(make_csv_dict(video))
return response
def _get_and_validate_course(course_key_string, user):
"""
Given a course key, return the course if it exists, the given user has
access to it, and it is properly configured for video uploads
"""
course_key = CourseKey.from_string(course_key_string)
# For now, assume all studio users that have access to the course can upload videos.
# In the future, we plan to add a new org-level role for video uploaders.
course = get_course_and_check_access(course_key, user)
if (
settings.FEATURES["ENABLE_VIDEO_UPLOAD_PIPELINE"] and
getattr(settings, "VIDEO_UPLOAD_PIPELINE", None) and
course and
course.video_pipeline_configured
):
return course
else:
return None
def _get_videos(course):
"""
Retrieves the list of videos from VAL corresponding to the videos listed in
the asset metadata store.
"""
edx_videos_ids = [
v.asset_id.path
for v in modulestore().get_all_asset_metadata(course.id, VIDEO_ASSET_TYPE)
]
videos = list(get_videos_for_ids(edx_videos_ids, VideoSortField.created, SortDirection.desc))
# convert VAL's status to studio's Video Upload feature status.
for video in videos:
video["status"] = StatusDisplayStrings.get(video["status"])
return videos
def _get_index_videos(course):
"""
Returns the information about each video upload required for the video list
"""
return list(
{
attr: video[attr]
for attr in ["edx_video_id", "client_video_id", "created", "duration", "status"]
}
for video in _get_videos(course)
)
def videos_index_html(course):
"""
Returns an HTML page to display previous video uploads and allow new ones
"""
return render_to_response(
"videos_index.html",
{
"context_course": course,
"post_url": reverse_course_url("videos_handler", unicode(course.id)),
"encodings_download_url": reverse_course_url("video_encodings_download", unicode(course.id)),
"previous_uploads": _get_index_videos(course),
"concurrent_upload_limit": settings.VIDEO_UPLOAD_PIPELINE.get("CONCURRENT_UPLOAD_LIMIT", 0),
}
)
def videos_index_json(course):
"""
Returns JSON in the following format:
{
"videos": [{
"edx_video_id": "aaaaaaaa-aaaa-4aaa-aaaa-aaaaaaaaaaaa",
"client_video_id": "video.mp4",
"created": "1970-01-01T00:00:00Z",
"duration": 42.5,
"status": "upload"
}]
}
"""
return JsonResponse({"videos": _get_index_videos(course)}, status=200)
def videos_post(course, request):
"""
Input (JSON):
{
"files": [{
"file_name": "video.mp4",
"content_type": "video/mp4"
}]
}
Returns (JSON):
{
"files": [{
"file_name": "video.mp4",
"upload_url": "http://example.com/put_video"
}]
}
The returned array corresponds exactly to the input array.
"""
error = None
if "files" not in request.json:
error = "Request object is not JSON or does not contain 'files'"
elif any(
"file_name" not in file or "content_type" not in file
for file in request.json["files"]
):
error = "Request 'files' entry does not contain 'file_name' and 'content_type'"
if error:
return JsonResponse({"error": error}, status=400)
bucket = storage_service_bucket()
course_video_upload_token = course.video_upload_pipeline["course_video_upload_token"]
req_files = request.json["files"]
resp_files = []
for req_file in req_files:
file_name = req_file["file_name"]
edx_video_id = unicode(uuid4())
key = storage_service_key(bucket, file_name=edx_video_id)
for metadata_name, value in [
("course_video_upload_token", course_video_upload_token),
("client_video_id", file_name),
("course_key", unicode(course.id)),
]:
key.set_metadata(metadata_name, value)
upload_url = key.generate_url(
KEY_EXPIRATION_IN_SECONDS,
"PUT",
headers={"Content-Type": req_file["content_type"]}
)
# persist edx_video_id as uploaded through this course
video_meta_data = AssetMetadata(course.id.make_asset_key(VIDEO_ASSET_TYPE, edx_video_id))
modulestore().save_asset_metadata(video_meta_data, request.user.id)
# persist edx_video_id in VAL
create_video({
"edx_video_id": edx_video_id,
"status": "upload",
"client_video_id": file_name,
"duration": 0,
"encoded_videos": [],
})
resp_files.append({"file_name": file_name, "upload_url": upload_url})
return JsonResponse({"files": resp_files}, status=200)
def storage_service_bucket():
"""
Returns an S3 bucket for video uploads.
"""
conn = s3.connection.S3Connection(
settings.AWS_ACCESS_KEY_ID,
settings.AWS_SECRET_ACCESS_KEY
)
return conn.get_bucket(settings.VIDEO_UPLOAD_PIPELINE["BUCKET"])
def storage_service_key(bucket, file_name):
"""
Returns an S3 key to the given file in the given bucket.
"""
key_name = "{}/{}".format(
settings.VIDEO_UPLOAD_PIPELINE.get("ROOT_PATH", ""),
file_name
)
return s3.key.Key(bucket, key_name)
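# Hypothetical illustration (not part of the original module): with
# VIDEO_UPLOAD_PIPELINE = {"BUCKET": "vids", "ROOT_PATH": "uploads"}, calling
# storage_service_key(bucket, file_name="<edx_video_id>") returns an S3 key
# named "uploads/<edx_video_id>" in the "vids" bucket.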
| sameetb-cuelogic/edx-platform-test | cms/djangoapps/contentstore/views/videos.py | Python | agpl-3.0 | 12,545 |
""" rewrite of lambdify - This stuff is not stable at all.
It is for internal use in the new plotting module.
It may (will! see the Q'n'A in the source) be rewritten.
It's completely self contained. Especially it does not use lambdarepr.
It does not aim to replace the current lambdify. Most importantly it will never
ever support anything else than sympy expressions (no Matrices, dictionaries
and so on).
"""
from __future__ import print_function, division
import re
from sympy import Symbol, NumberSymbol, I, zoo, oo
from sympy.core.compatibility import exec_
from sympy.utilities.iterables import numbered_symbols
# We parse the expression string into a tree that identifies functions. Then
# we translate the names of the functions and we translate also some strings
# that are not names of functions (all this according to translation
# dictionaries).
# If the translation goes to another module (like numpy) the
# module is imported and 'func' is translated to 'module.func'.
# If a function can not be translated, the inner nodes of that part of the
# tree are not translated. So if we have Integral(sqrt(x)), sqrt is not
# translated to np.sqrt and the Integral does not crash.
# A namespace for all this is generated by crawling the (func, args) tree of
# the expression. The creation of this namespace involves many ugly
# workarounds.
# The namespace consists of all the names needed for the sympy expression and
# all the name of modules used for translation. Those modules are imported only
# as a name (import numpy as np) in order to keep the namespace small and
# manageable.
# Please, if there is a bug, do not try to fix it here! Rewrite this by using
# the method proposed in the last Q'n'A below. That way the new function will
# work just as well, be just as simple, but it won't need any new workarounds.
# If you insist on fixing it here, look at the workarounds in the function
# sympy_expression_namespace and in lambdify.
# Q: Why are you not using python abstract syntax tree?
# A: Because it is more complicated and not much more powerful in this case.
# Q: What if I have Symbol('sin') or g=Function('f')?
# A: You will break the algorithm. We should use srepr to defend against this?
# The problem with Symbol('sin') is that it will be printed as 'sin'. The
# parser will distinguish it from the function 'sin' because functions are
# detected thanks to the opening parenthesis, but the lambda expression won't
# understand the difference if we have also the sin function.
# The solution (complicated) is to use srepr and maybe ast.
# The problem with the g=Function('f') is that it will be printed as 'f' but in
# the global namespace we have only 'g'. But as the same printer is used in the
# constructor of the namespace there will be no problem.
# Q: What if some of the printers are not printing as expected?
# A: The algorithm won't work. You must use srepr for those cases. But even
# srepr may not print well. All problems with printers should be considered
# bugs.
# Q: What about _imp_ functions?
# A: Those are taken care for by evalf. A special case treatment will work
# faster but it's not worth the code complexity.
# Q: Will ast fix all possible problems?
# A: No. You will always have to use some printer. Even srepr may not work in
# some cases. But if the printer does not work, that should be considered a
# bug.
# Q: Is there some way to fix all possible problems?
# A: Probably by constructing our strings ourself by traversing the (func,
# args) tree and creating the namespace at the same time. That actually sounds
# good.
from sympy.external import import_module
import warnings
# TODO: debugging output
class vectorized_lambdify(object):
""" Return a sufficiently smart, vectorized and lambdified function.
Returns only reals.
    This function uses experimental_lambdify to create a lambdified
expression ready to be used with numpy. Many of the functions in sympy
are not implemented in numpy so in some cases we resort to python cmath or
even to evalf.
The following translations are tried:
only numpy complex
- on errors raised by sympy trying to work with ndarray:
only python cmath and then vectorize complex128
When using python cmath there is no need for evalf or float/complex
because python cmath calls those.
This function never tries to mix numpy directly with evalf because numpy
does not understand sympy Float. If this is needed one can use the
float_wrap_evalf/complex_wrap_evalf options of experimental_lambdify or
better one can be explicit about the dtypes that numpy works with.
Check numpy bug http://projects.scipy.org/numpy/ticket/1013 to know what
types of errors to expect.
"""
def __init__(self, args, expr):
self.args = args
self.expr = expr
self.lambda_func = experimental_lambdify(args, expr, use_np=True)
self.vector_func = self.lambda_func
self.failure = False
def __call__(self, *args):
np = import_module('numpy')
np_old_err = np.seterr(invalid='raise')
try:
temp_args = (np.array(a, dtype=np.complex) for a in args)
results = self.vector_func(*temp_args)
results = np.ma.masked_where(
np.abs(results.imag) > 1e-7 * np.abs(results),
results.real, copy=False)
except Exception as e:
#DEBUG: print 'Error', type(e), e
if ((isinstance(e, TypeError)
and 'unhashable type: \'numpy.ndarray\'' in str(e))
or
(isinstance(e, ValueError)
and ('Invalid limits given:' in str(e)
or 'negative dimensions are not allowed' in str(e) # XXX
or 'sequence too large; must be smaller than 32' in str(e)))): # XXX
# Almost all functions were translated to numpy, but some were
                # left as sympy functions. They received an ndarray as an
# argument and failed.
# sin(ndarray(...)) raises "unhashable type"
# Integral(x, (x, 0, ndarray(...))) raises "Invalid limits"
# other ugly exceptions that are not well understood (marked with XXX)
# TODO: Cleanup the ugly special cases marked with xxx above.
# Solution: use cmath and vectorize the final lambda.
self.lambda_func = experimental_lambdify(
self.args, self.expr, use_python_cmath=True)
self.vector_func = np.vectorize(
self.lambda_func, otypes=[np.complex])
results = self.vector_func(*args)
results = np.ma.masked_where(
np.abs(results.imag) > 1e-7 * np.abs(results),
results.real, copy=False)
else:
# Complete failure. One last try with no translations, only
# wrapping in complex((...).evalf()) and returning the real
# part.
if self.failure:
raise e
else:
self.failure = True
self.lambda_func = experimental_lambdify(
self.args, self.expr, use_evalf=True,
complex_wrap_evalf=True)
self.vector_func = np.vectorize(
self.lambda_func, otypes=[np.complex])
results = self.vector_func(*args)
results = np.ma.masked_where(
np.abs(results.imag) > 1e-7 * np.abs(results),
results.real, copy=False)
warnings.warn('The evaluation of the expression is'
                                  ' problematic. We are trying a fallback method'
' that may still work. Please report this as a bug.')
finally:
np.seterr(**np_old_err)
return results
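# Hypothetical usage sketch (not in the original module; assumes numpy and a
# plain sympy expression): the plotting code builds one instance per
# expression and feeds it coordinate arrays, e.g.
#   from sympy.abc import x
#   f = vectorized_lambdify([x], sin(x))
#   f(np.linspace(0, 3.14, 100))  # -> real-valued masked array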
class lambdify(object):
"""Returns the lambdified function.
This function uses experimental_lambdify to create a lambdified
expression. It uses cmath to lambdify the expression. If the function
is not implemented in python cmath, python cmath calls evalf on those
functions.
"""
def __init__(self, args, expr):
self.args = args
self.expr = expr
self.lambda_func = experimental_lambdify(args, expr, use_evalf=True,
use_python_cmath=True)
self.failure = False
def __call__(self, args):
args = complex(args)
try:
#The result can be sympy.Float. Hence wrap it with complex type.
result = complex(self.lambda_func(args))
if abs(result.imag) > 1e-7 * abs(result):
return None
else:
return result.real
except Exception as e:
# The exceptions raised by sympy, cmath are not consistent and
# hence it is not possible to specify all the exceptions that
# are to be caught. Presently there are no cases for which the code
# reaches this block other than ZeroDivisionError and complex
            # comparison. Also the exception is caught only once. If the
# exception repeats itself,
# then it is not caught and the corresponding error is raised.
# XXX: Remove catching all exceptions once the plotting module
# is heavily tested.
if isinstance(e, ZeroDivisionError):
return None
elif isinstance(e, TypeError) and ('no ordering relation is'
' defined for complex numbers'
in str(e)):
self.lambda_func = experimental_lambdify(self.args, self.expr,
use_evalf=True,
use_python_math=True)
result = self.lambda_func(args.real)
return result
else:
if self.failure:
raise e
#Failure
#Try wrapping it with complex(..).evalf()
self.failure = True
self.lambda_func = experimental_lambdify(self.args, self.expr,
use_evalf=True,
complex_wrap_evalf=True)
result = self.lambda_func(args)
warnings.warn('The evaluation of the expression is'
                          ' problematic. We are trying a fallback method'
' that may still work. Please report this as a bug.')
if abs(result.imag) > 1e-7 * abs(result):
return None
else:
return result.real
def experimental_lambdify(*args, **kwargs):
l = Lambdifier(*args, **kwargs)
return l.lambda_func
class Lambdifier(object):
def __init__(self, args, expr, print_lambda=False, use_evalf=False,
float_wrap_evalf=False, complex_wrap_evalf=False,
use_np=False, use_python_math=False, use_python_cmath=False,
use_interval=False):
self.print_lambda = print_lambda
self.use_evalf = use_evalf
self.float_wrap_evalf = float_wrap_evalf
self.complex_wrap_evalf = complex_wrap_evalf
self.use_np = use_np
self.use_python_math = use_python_math
self.use_python_cmath = use_python_cmath
self.use_interval = use_interval
# Constructing the argument string
# - check
if not all([isinstance(a, Symbol) for a in args]):
raise ValueError('The arguments must be Symbols.')
# - use numbered symbols
syms = numbered_symbols(exclude=expr.free_symbols)
newargs = [next(syms) for i in args]
expr = expr.xreplace(dict(zip(args, newargs)))
argstr = ', '.join([str(a) for a in newargs])
del syms, newargs, args
# Constructing the translation dictionaries and making the translation
self.dict_str = self.get_dict_str()
self.dict_fun = self.get_dict_fun()
exprstr = str(expr)
newexpr = self.tree2str_translate(self.str2tree(exprstr))
# Constructing the namespaces
namespace = {}
namespace.update(self.sympy_atoms_namespace(expr))
namespace.update(self.sympy_expression_namespace(expr))
# XXX Workaround
# Ugly workaround because Pow(a,Half) prints as sqrt(a)
# and sympy_expression_namespace can not catch it.
from sympy import sqrt
namespace.update({'sqrt': sqrt})
# End workaround.
if use_python_math:
namespace.update({'math': __import__('math')})
if use_python_cmath:
namespace.update({'cmath': __import__('cmath')})
if use_np:
try:
namespace.update({'np': __import__('numpy')})
except ImportError:
raise ImportError(
'experimental_lambdify failed to import numpy.')
if use_interval:
namespace.update({'imath': __import__(
'sympy.plotting.intervalmath', fromlist=['intervalmath'])})
namespace.update({'math': __import__('math')})
# Construct the lambda
if self.print_lambda:
print(newexpr)
eval_str = 'lambda %s : ( %s )' % (argstr, newexpr)
exec_("from __future__ import division; MYNEWLAMBDA = %s" % eval_str, namespace)
self.lambda_func = namespace['MYNEWLAMBDA']
##############################################################################
# Dicts for translating from sympy to other modules
##############################################################################
###
# builtins
###
# Functions with different names in builtins
builtin_functions_different = {
'Min': 'min',
'Max': 'max',
'Abs': 'abs',
}
# Strings that should be translated
builtin_not_functions = {
'I': '1j',
'oo': '1e400',
}
###
# numpy
###
# Functions that are the same in numpy
numpy_functions_same = [
'sin', 'cos', 'tan', 'sinh', 'cosh', 'tanh', 'exp', 'log',
'sqrt', 'floor', 'conjugate',
]
# Functions with different names in numpy
numpy_functions_different = {
"acos": "arccos",
"acosh": "arccosh",
"arg": "angle",
"asin": "arcsin",
"asinh": "arcsinh",
"atan": "arctan",
"atan2": "arctan2",
"atanh": "arctanh",
"ceiling": "ceil",
"im": "imag",
"ln": "log",
"Max": "amax",
"Min": "amin",
"re": "real",
"Abs": "abs",
}
# Strings that should be translated
numpy_not_functions = {
'pi': 'np.pi',
'oo': 'np.inf',
'E': 'np.e',
}
###
# python math
###
# Functions that are the same in math
math_functions_same = [
'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'atan2',
'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
'exp', 'log', 'erf', 'sqrt', 'floor', 'factorial', 'gamma',
]
# Functions with different names in math
math_functions_different = {
'ceiling': 'ceil',
'ln': 'log',
'loggamma': 'lgamma'
}
# Strings that should be translated
math_not_functions = {
'pi': 'math.pi',
'E': 'math.e',
}
###
# python cmath
###
# Functions that are the same in cmath
cmath_functions_same = [
'sin', 'cos', 'tan', 'asin', 'acos', 'atan',
'sinh', 'cosh', 'tanh', 'asinh', 'acosh', 'atanh',
'exp', 'log', 'sqrt',
]
# Functions with different names in cmath
cmath_functions_different = {
'ln': 'log',
'arg': 'phase',
}
# Strings that should be translated
cmath_not_functions = {
'pi': 'cmath.pi',
'E': 'cmath.e',
}
###
# intervalmath
###
interval_not_functions = {
'pi': 'math.pi',
'E': 'math.e'
}
interval_functions_same = [
'sin', 'cos', 'exp', 'tan', 'atan', 'log',
'sqrt', 'cosh', 'sinh', 'tanh', 'floor',
'acos', 'asin', 'acosh', 'asinh', 'atanh',
'Abs', 'And', 'Or'
]
interval_functions_different = {
'Min': 'imin',
'Max': 'imax',
'ceiling': 'ceil',
}
###
# mpmath, etc
###
#TODO
###
# Create the final ordered tuples of dictionaries
###
# For strings
def get_dict_str(self):
dict_str = dict(self.builtin_not_functions)
if self.use_np:
dict_str.update(self.numpy_not_functions)
if self.use_python_math:
dict_str.update(self.math_not_functions)
if self.use_python_cmath:
dict_str.update(self.cmath_not_functions)
if self.use_interval:
dict_str.update(self.interval_not_functions)
return dict_str
# For functions
def get_dict_fun(self):
dict_fun = dict(self.builtin_functions_different)
if self.use_np:
for s in self.numpy_functions_same:
dict_fun[s] = 'np.' + s
for k, v in self.numpy_functions_different.items():
dict_fun[k] = 'np.' + v
if self.use_python_math:
for s in self.math_functions_same:
dict_fun[s] = 'math.' + s
for k, v in self.math_functions_different.items():
dict_fun[k] = 'math.' + v
if self.use_python_cmath:
for s in self.cmath_functions_same:
dict_fun[s] = 'cmath.' + s
for k, v in self.cmath_functions_different.items():
dict_fun[k] = 'cmath.' + v
if self.use_interval:
for s in self.interval_functions_same:
dict_fun[s] = 'imath.' + s
for k, v in self.interval_functions_different.items():
dict_fun[k] = 'imath.' + v
return dict_fun
##############################################################################
# The translator functions, tree parsers, etc.
##############################################################################
def str2tree(self, exprstr):
"""Converts an expression string to a tree.
Functions are represented by ('func_name(', tree_of_arguments).
Other expressions are (head_string, mid_tree, tail_str).
Expressions that do not contain functions are directly returned.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy import Integral, sin
>>> from sympy.plotting.experimental_lambdify import Lambdifier
>>> str2tree = Lambdifier([x], x).str2tree
>>> str2tree(str(Integral(x, (x, 1, y))))
('', ('Integral(', 'x, (x, 1, y)'), ')')
>>> str2tree(str(x+y))
'x + y'
>>> str2tree(str(x+y*sin(z)+1))
('x + y*', ('sin(', 'z'), ') + 1')
>>> str2tree('sin(y*(y + 1.1) + (sin(y)))')
('', ('sin(', ('y*(y + 1.1) + (', ('sin(', 'y'), '))')), ')')
"""
#matches the first 'function_name('
first_par = re.search(r'(\w+\()', exprstr)
if first_par is None:
return exprstr
else:
start = first_par.start()
end = first_par.end()
head = exprstr[:start]
func = exprstr[start:end]
tail = exprstr[end:]
count = 0
for i, c in enumerate(tail):
if c == '(':
count += 1
elif c == ')':
count -= 1
if count == -1:
break
func_tail = self.str2tree(tail[:i])
tail = self.str2tree(tail[i:])
return (head, (func, func_tail), tail)
@classmethod
def tree2str(cls, tree):
"""Converts a tree to string without translations.
Examples
========
>>> from sympy.abc import x, y, z
>>> from sympy import Integral, sin
>>> from sympy.plotting.experimental_lambdify import Lambdifier
>>> str2tree = Lambdifier([x], x).str2tree
>>> tree2str = Lambdifier([x], x).tree2str
>>> tree2str(str2tree(str(x+y*sin(z)+1)))
'x + y*sin(z) + 1'
"""
if isinstance(tree, str):
return tree
else:
return ''.join(map(cls.tree2str, tree))
def tree2str_translate(self, tree):
"""Converts a tree to string with translations.
Function names are translated by translate_func.
Other strings are translated by translate_str.
"""
if isinstance(tree, str):
return self.translate_str(tree)
elif isinstance(tree, tuple) and len(tree) == 2:
return self.translate_func(tree[0][:-1], tree[1])
else:
return ''.join([self.tree2str_translate(t) for t in tree])
def translate_str(self, estr):
"""Translate substrings of estr using in order the dictionaries in
dict_tuple_str."""
for pattern, repl in self.dict_str.items():
estr = re.sub(pattern, repl, estr)
return estr
def translate_func(self, func_name, argtree):
"""Translate function names and the tree of arguments.
        If the function name is not in dict_fun then the function is
        surrounded by a float((...).evalf()).
The use of float is necessary as np.<function>(sympy.Float(..)) raises an
error."""
if func_name in self.dict_fun:
new_name = self.dict_fun[func_name]
argstr = self.tree2str_translate(argtree)
return new_name + '(' + argstr
else:
template = '(%s(%s)).evalf(' if self.use_evalf else '%s(%s'
if self.float_wrap_evalf:
template = 'float(%s)' % template
elif self.complex_wrap_evalf:
template = 'complex(%s)' % template
return template % (func_name, self.tree2str(argtree))
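    # For illustration (assuming use_python_math): translate_func('sin', 'x')
    # returns 'math.sin(x'; the closing parenthesis is contributed by the tail
    # string of the enclosing tuple when tree2str_translate joins the parts.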
##############################################################################
# The namespace constructors
##############################################################################
@classmethod
def sympy_expression_namespace(cls, expr):
"""Traverses the (func, args) tree of an expression and creates a sympy
        namespace. All other modules are imported only as a module name. That way
        the namespace is not polluted and stays quite small. It probably causes
        many more variable lookups and therefore takes more time, but there are
        no benchmarks on that for the moment."""
if expr is None:
return {}
else:
funcname = str(expr.func)
# XXX Workaround
# Here we add an ugly workaround because str(func(x))
# is not always the same as str(func). Eg
# >>> str(Integral(x))
# "Integral(x)"
# >>> str(Integral)
# "<class 'sympy.integrals.integrals.Integral'>"
# >>> str(sqrt(x))
# "sqrt(x)"
# >>> str(sqrt)
# "<function sqrt at 0x3d92de8>"
# >>> str(sin(x))
# "sin(x)"
# >>> str(sin)
# "sin"
# Either one of those can be used but not all at the same time.
# The code considers the sin example as the right one.
regexlist = [
r'<class \'sympy[\w.]*?.([\w]*)\'>$',
# the example Integral
r'<function ([\w]*) at 0x[\w]*>$', # the example sqrt
]
for r in regexlist:
m = re.match(r, funcname)
if m is not None:
funcname = m.groups()[0]
# End of the workaround
# XXX debug: print funcname
args_dict = {}
for a in expr.args:
if (isinstance(a, Symbol) or
isinstance(a, NumberSymbol) or
a in [I, zoo, oo]):
continue
else:
args_dict.update(cls.sympy_expression_namespace(a))
args_dict.update({funcname: expr.func})
return args_dict
@staticmethod
def sympy_atoms_namespace(expr):
"""For no real reason this function is separated from
sympy_expression_namespace. It can be moved to it."""
atoms = expr.atoms(Symbol, NumberSymbol, I, zoo, oo)
d = {}
for a in atoms:
# XXX debug: print 'atom:' + str(a)
d[str(a)] = a
return d
| sahilshekhawat/sympy | sympy/plotting/experimental_lambdify.py | Python | bsd-3-clause | 25,111 |
import re
import navi_alphabet
from string import Template
word_path = 'word_list.txt'
output_path = 'poster/words.tex'
word_template = Template(r'\wordtype{$wtype} \wordgroup{$wgroups} \naviword{$word}')
word_group_template = Template(r'\wordgroup{$wgroup}')
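# Illustrative substitution (hypothetical entry):
#   word_template.substitute(word='kaltxi', wtype='intj', wgroups='')
#   -> '\wordtype{intj} \wordgroup{} \naviword{kaltxi}'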
def main():
def sub(w):
word, wtype, wgroups = w
word = word.replace(navi_alphabet.tiftang1, "'").replace(navi_alphabet.tiftang2, "'").strip()
wtype = wtype.replace('.', '')
if wgroups:
wgroups = ' '.join( word_group_template.substitute(wgroup=wg) for wg in wgroups.split(',') )
return word_template.substitute(word=word, wtype=wtype, wgroups=wgroups)
    # Read the word list, sort it by Na'vi alphabetical order, convert each
    # entry to a TeX line, and write the result to the poster words file.
    with open(word_path, encoding='utf-8') as infile:
        words = [w.split(';') for w in infile]
    words.sort(key=lambda w: navi_alphabet.sort_key(w[0]))
    words_tex = '\n'.join(sub(w) for w in words)
    with open(output_path, 'w', encoding='utf-8') as outfile:
        print(words_tex, file=outfile)
if __name__ == '__main__':
main() | EyeOfPython/navi-poster | navi_poster.py | Python | cc0-1.0 | 947 |
#!/usr/bin/env python
#
# Add
## Upvote
## Downvote
# Remove
#
#
import webapp2
import logging, json
from datetime import datetime
from src import rank, config
from webapp2_extras import jinja2, sessions
from src.entities import post as post1
from src.jinja2_factory import jinja2_factory
import random
class BaseHandler(webapp2.RequestHandler):
@webapp2.cached_property
def jinja2(self):
return jinja2.get_jinja2(factory=jinja2_factory)
def render_response(self, filename, **template_args):
self.response.write(self.jinja2.render_template(filename, **template_args))
def dispatch(self):
# Get a session store for this request
self.session_store = sessions.get_store(request = self.request)
try:
# Dispatch request
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Return a session using the default cookie key
return self.session_store.get_session()
class MainHandler(BaseHandler):
def get(self):
        posts = post1.Post().get_by_score()
try:
status = self.session.get_flashes('status')[0][0]
flash = self.session.get_flashes('message')[0][0]
except IndexError:
status, flash = None, None
        self.render_response('index.html', list=posts, status=status, flash=flash)
class PostHandler(BaseHandler):
def post(self):
if(self.session.get('authorized') == 'True'):
p1 = post1.Post()
p1.populate(title=self.request.get('title'),
description = self.request.get('description'),
upvotes = int(round(random.random() * 100)),
downvotes = int(round(random.random() * 100)),
date_posted = datetime.now())
p1.put()
self.session.add_flash('alert alert-success', key='status')
self.session.add_flash('<strong>Posted</strong> successfully', key='message')
else:
self.session.add_flash('alert alert-error', key='status')
self.session.add_flash('<strong>Cannot post.</strong> Are you authorized?', key='message')
self.redirect(self.uri_for('index'))
class VoteHandler(BaseHandler):
def post(self):
p = post1.Post()
key = self.request.get('value')
votes = 0
if(self.request.get('action') == 'upvote'):
p = p.upvote(key)
votes = p.upvotes
elif(self.request.get('action') == 'downvote'):
p = p.downvote(key)
votes = p.downvotes
self.response.write(json.dumps({ 'count' : votes, 'score' : p.score }))
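    # Illustrative exchange (hypothetical values): POSTing
    # action=upvote&value=<post key> returns JSON such as
    # {"count": 42, "score": 0.73}.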
class LoginHandler(BaseHandler):
def post(self):
if(self.request.get('Login') == 'a correct password'):
self.session['authorized'] = 'True'
self.session.add_flash('alert alert-success', key='status')
self.session.add_flash('Authorized to post messages', key='message')
else:
self.session.add_flash('alert alert-error', key='status')
self.session.add_flash('Incorrect authentication', key='message')
self.redirect('/')
class DeleteHandler(BaseHandler):
def post(self):
logging.debug(self.session.get('authorized'))
if(self.session.get('authorized') == 'True'):
key = self.request.get('key')
post1.Post().delete(key)
self.response.write(json.dumps({}))
else:
self.abort(403)
app = webapp2.WSGIApplication([
webapp2.Route('/', MainHandler, name='index'),
('/post', PostHandler),
('/vote', VoteHandler),
('/login', LoginHandler),
('/delete', DeleteHandler)
], debug=True, config=config.config)
| posibyte/wilsonrankgaetest | main.py | Python | mit | 4,464 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: David BEAL, Copyright 2014 Akretion
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp.osv import orm, fields
class AbstractConfigSettings(orm.AbstractModel):
_name = 'abstract.config.settings'
_description = 'Abstract configuration settings'
# prefix field name to differentiate fields in company with those in config
_prefix = 'setting_'
# this is the class name to import in your module
# (it should be ResCompany or res_company, depends of your code)
_companyObject = None
def _filter_field(self, field_key):
"""Inherit in your module to define for which company field
you don't want have a matching related field"""
return True
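    # Illustrative override (hypothetical field name):
    #
    #     def _filter_field(self, field_key):
    #         return field_key != 'setting_internal_code'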
def __init__(self, pool, cr):
super(AbstractConfigSettings, self).__init__(pool, cr)
if self._companyObject:
for field_key in self._companyObject._columns:
                # allows excluding some fields
if self._filter_field(field_key):
args = ('company_id', field_key)
kwargs = {
'string': self._companyObject._columns[field_key].string,
'help': self._companyObject._columns[field_key].help,
'type': self._companyObject._columns[field_key]._type,
}
if '_obj' in self._companyObject._columns[field_key].__dict__.keys():
kwargs['relation'] = \
self._companyObject._columns[field_key]._obj
if '_domain' in \
self._companyObject._columns[field_key].__dict__.keys():
kwargs['domain'] = \
self._companyObject._columns[field_key]._domain
field_key = re.sub('^' + self._prefix, '', field_key)
self._columns[field_key] = \
fields.related(*args, **kwargs)
_columns = {
'company_id': fields.many2one(
'res.company',
'Company',
required=True),
}
def _default_company(self, cr, uid, context=None):
user = self.pool['res.users'].browse(cr, uid, uid, context=context)
return user.company_id.id
_defaults = {
'company_id': _default_company,
}
def field_to_populate_as_related(self, cr, uid, field, company_cols, context=None):
"""Only fields which comes from company with the right prefix
must be defined as related"""
if self._prefix + field in company_cols:
return True
return False
def onchange_company_id(self, cr, uid, ids, company_id, context=None):
" update related fields "
values = {}
values['currency_id'] = False
if not company_id:
return {'value': values}
company_m = self.pool['res.company']
company = company_m.browse(
cr, uid, company_id, context=context)
company_cols = company_m._columns.keys()
for field in self._columns:
if self.field_to_populate_as_related(
cr, uid, field, company_cols, context=context):
cpny_field = self._columns[field].arg[-1]
if self._columns[field]._type == 'many2one':
values[field] = company[cpny_field]['id'] or False
else:
values[field] = company[cpny_field]
return {'value': values}
def create(self, cr, uid, values, context=None):
id = super(AbstractConfigSettings, self).create(
cr, uid, values, context=context)
# Hack: to avoid some nasty bug, related fields are not written
# upon record creation. Hence we write on those fields here.
vals = {}
for fname, field in self._columns.iteritems():
if isinstance(field, fields.related) and fname in values:
vals[fname] = values[fname]
self.write(cr, uid, [id], vals, context)
return id
| hbrunn/server-tools | __unported__/configuration_helper/config.py | Python | agpl-3.0 | 4,900 |
"formatted string with args: {<ref>fst}".format(fst=12) | asedunov/intellij-community | python/testData/resolve/FormatStringKWArgs.py | Python | apache-2.0 | 55 |
# -*- coding: utf-8 -*-
"""
flask.testsuite.regression
~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests regressions.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import gc
import sys
import flask
import threading
import unittest
from werkzeug.exceptions import NotFound
from flask.testsuite import FlaskTestCase
_gc_lock = threading.Lock()
class _NoLeakAsserter(object):
def __init__(self, testcase):
self.testcase = testcase
def __enter__(self):
gc.disable()
_gc_lock.acquire()
loc = flask._request_ctx_stack._local
# Force Python to track this dictionary at all times.
# This is necessary since Python only starts tracking
# dicts if they contain mutable objects. It's a horrible,
# horrible hack but makes this kinda testable.
loc.__storage__['FOOO'] = [1, 2, 3]
gc.collect()
self.old_objects = len(gc.get_objects())
def __exit__(self, exc_type, exc_value, tb):
if not hasattr(sys, 'getrefcount'):
gc.collect()
new_objects = len(gc.get_objects())
if new_objects > self.old_objects:
self.testcase.fail('Example code leaked')
_gc_lock.release()
gc.enable()
class MemoryTestCase(FlaskTestCase):
def assert_no_leak(self):
return _NoLeakAsserter(self)
def test_memory_consumption(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('simple_template.html', whiskey=42)
def fire():
with app.test_client() as c:
rv = c.get('/')
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, b'<h1>42</h1>')
# Trigger caches
fire()
# This test only works on CPython 2.7.
if sys.version_info >= (2, 7) and \
not hasattr(sys, 'pypy_translation_info'):
with self.assert_no_leak():
for x in range(10):
fire()
def test_safe_join_toplevel_pardir(self):
from flask.helpers import safe_join
with self.assert_raises(NotFound):
safe_join('/foo', '..')
class ExceptionTestCase(FlaskTestCase):
def test_aborting(self):
class Foo(Exception):
whatever = 42
app = flask.Flask(__name__)
app.testing = True
@app.errorhandler(Foo)
def handle_foo(e):
return str(e.whatever)
@app.route('/')
def index():
raise flask.abort(flask.redirect(flask.url_for('test')))
@app.route('/test')
def test():
raise Foo()
with app.test_client() as c:
rv = c.get('/')
self.assertEqual(rv.headers['Location'], 'http://localhost/test')
rv = c.get('/test')
self.assertEqual(rv.data, b'42')
def suite():
suite = unittest.TestSuite()
if os.environ.get('RUN_FLASK_MEMORY_TESTS') == '1':
suite.addTest(unittest.makeSuite(MemoryTestCase))
suite.addTest(unittest.makeSuite(ExceptionTestCase))
return suite
| abhishekgahlot/flask | flask/testsuite/regression.py | Python | bsd-3-clause | 3,198 |
from yowsup.common import YowConstants
from yowsup.layers import YowLayerEvent, YowProtocolLayer, EventCallback
from .keystream import KeyStream
from yowsup.common.tools import TimeTools
from .layer_crypt import YowCryptLayer
from yowsup.layers.network import YowNetworkLayer
from .autherror import AuthError
from .protocolentities import *
from yowsup.common.tools import StorageTools
from .layer_interface_authentication import YowAuthenticationProtocolLayerInterface
from yowsup.env import CURRENT_ENV
import base64
class YowAuthenticationProtocolLayer(YowProtocolLayer):
EVENT_LOGIN = "org.openwhatsapp.yowsup.event.auth.login"
EVENT_AUTHED = "org.openwhatsapp.yowsup.event.auth.authed"
PROP_CREDENTIALS = "org.openwhatsapp.yowsup.prop.auth.credentials"
PROP_PASSIVE = "org.openwhatsapp.yowsup.prop.auth.passive"
def __init__(self):
handleMap = {
"stream:features": (self.handleStreamFeatures, None),
"failure": (self.handleFailure, None),
"success": (self.handleSuccess, None),
"challenge": (self.handleChallenge, None),
"stream:error": (self.handleStreamError, None),
}
super(YowAuthenticationProtocolLayer, self).__init__(handleMap)
self.interface = YowAuthenticationProtocolLayerInterface(self)
self.credentials = None #left for backwards-compat
self._credentials = None #new style set
def __str__(self):
return "Authentication Layer"
def __getCredentials(self, credentials = None):
if (credentials == None):
u = self.getProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS)[0]
pb64 = self.getProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS)[1]
else:
u = credentials[0]
pb64 = credentials[1]
if type(pb64) is str:
pb64 = pb64.encode()
password = base64.b64decode(pb64)
return (u, bytearray(password))
def setCredentials(self, credentials):
self.setProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS, credentials) #keep for now
self._credentials = self.__getCredentials(credentials)
def getUsername(self, full = False):
if self._credentials:
return self._credentials[0] if not full else ("%s@%s" % (self._credentials[0], YowConstants.WHATSAPP_SERVER))
else:
prop = self.getProp(YowAuthenticationProtocolLayer.PROP_CREDENTIALS)
return prop[0] if prop else None
@EventCallback(YowNetworkLayer.EVENT_STATE_CONNECTED)
def onConnected(self, yowLayerEvent):
self.login()
## general methods
def login(self):
self.credentials = self._credentials or self.__getCredentials()
self._sendFeatures()
self._sendAuth()
    ### received node handlers
def handleStreamFeatures(self, node):
nodeEntity = StreamFeaturesProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(nodeEntity)
def handleSuccess(self, node):
if(node.data != None): StorageTools.writeNonce(self.credentials[0],node.data)
successEvent = YowLayerEvent(self.__class__.EVENT_AUTHED, passive = self.getProp(self.__class__.PROP_PASSIVE))
self.broadcastEvent(successEvent)
nodeEntity = SuccessProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(nodeEntity)
def handleFailure(self, node):
nodeEntity = FailureProtocolEntity.fromProtocolTreeNode(node)
self.toUpper(nodeEntity)
self.broadcastEvent(YowLayerEvent(YowNetworkLayer.EVENT_STATE_DISCONNECT, reason = "Authentication Failure"))
raise AuthError(nodeEntity.getReason())
def handleChallenge(self, node):
nodeEntity = ChallengeProtocolEntity.fromProtocolTreeNode(node)
self._sendResponse(nodeEntity.getNonce())
def handleStreamError(self, node):
if node.getChild("text"):
nodeEntity = StreamErrorConflictProtocolEntity.fromProtocolTreeNode(node)
elif node.getChild("ack"):
nodeEntity = StreamErrorAckProtocolEntity.fromProtocolTreeNode(node)
else:
raise AuthError("Unhandled stream:error node:\n%s" % node)
self.toUpper(nodeEntity)
    ## senders
def _sendFeatures(self):
self.entityToLower(StreamFeaturesProtocolEntity(["readreceipts", "groups_v2", "privacy", "presence"]))
def _sendAuth(self):
passive = self.getProp(self.__class__.PROP_PASSIVE, False)
nonce = StorageTools.getNonce(self.credentials[0])
if nonce == None:
self.entityToLower(AuthProtocolEntity(self.credentials[0], passive=passive))
else:
inputKey, outputKey, authBlob = self.generateAuthBlob(nonce)
            # set only the input key so the crypt layer does not encrypt the whole outgoing node
self.broadcastEvent(YowLayerEvent(YowCryptLayer.EVENT_KEYS_READY, keys = (inputKey, None)))
self.entityToLower(AuthProtocolEntity(self.credentials[0], passive=passive, nonce=authBlob))
self.broadcastEvent(YowLayerEvent(YowCryptLayer.EVENT_KEYS_READY, keys = (inputKey, outputKey)))
def _sendResponse(self,nonce):
inputKey, outputKey, authBlob = self.generateAuthBlob(nonce)
responseEntity = ResponseProtocolEntity(authBlob)
        # set only the input key so the crypt layer does not encrypt the whole outgoing node
self.broadcastEvent(YowLayerEvent(YowCryptLayer.EVENT_KEYS_READY, keys = (inputKey, None)))
self.entityToLower(responseEntity)
self.broadcastEvent(YowLayerEvent(YowCryptLayer.EVENT_KEYS_READY, keys = (inputKey, outputKey)))
#YowCryptLayer.setProp("outputKey", outputKey)
def generateAuthBlob(self, nonce):
keys = KeyStream.generateKeys(self.credentials[1], nonce)
inputKey = KeyStream(keys[2], keys[3])
outputKey = KeyStream(keys[0], keys[1])
#YowCryptLayer.setProp("inputKey", inputKey)
nums = bytearray(4)
#nums = [0] * 4
username_bytes = list(map(ord, self.credentials[0]))
nums.extend(username_bytes)
nums.extend(nonce)
utcNow = str(int(TimeTools.utcTimestamp()))
time_bytes = list(map(ord, utcNow))
nums.extend(time_bytes)
strCat = "\x00\x00\x00\x00\x00\x00\x00\x00"
strCat += CURRENT_ENV.getOSVersion() + "\x00"
strCat += CURRENT_ENV.getManufacturer() + "\x00"
strCat += CURRENT_ENV.getDeviceName() + "\x00"
strCat += CURRENT_ENV.getBuildVersion()
nums.extend(list(map(ord, strCat)))
encoded = outputKey.encodeMessage(nums, 0, 4, len(nums) - 4)
authBlob = "".join(map(chr, encoded))
return (inputKey, outputKey, authBlob)
| maruen/yowsup.jlguardi | yowsup/layers/auth/layer_authentication.py | Python | gpl-3.0 | 6,656 |
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == "__main__":
print("Verify that Networkx version is 1.9.1 and above.")
print("Networkx version is: %s" % nx.__version__)
# === Helper functions ===
# Scroll down to network definition
def edges_to_labels(G, flow_dict={}):
d = dict()
for e in G.edges():
edge = G.edge[e[0]][e[1]]
lb = "%g" % 0
cap = ("%g" % edge["capacity"]) if "capacity" in edge else "inf"
cost = ("%g" % edge["weight"]) if "weight" in edge else "0"
sol = " "
if e[0] in flow_dict:
if e[1] in flow_dict[e[0]]:
sol = "%g" % flow_dict[e[0]][e[1]]
d[e] = "%s/%s/%s, %s" % (lb, sol, cap, cost)
return d
def nodes_to_labels(G):
d = dict()
for n in G.nodes():
node = G.node[n]
bal = ("%g" % node["demand"]) if "demand" in node else "0"
d[n] = bal
return d
def draw_graph(G, flow_dict={}):
pos = nx.spectral_layout(G)
nx.draw(G, pos = pos, with_labels=True)
nx.draw_networkx_edge_labels(G, pos, edge_labels=edges_to_labels(G, flow_dict))
offset = (0.02, 0.07)
for p in pos:
pos[p][0] += offset[0]
pos[p][1] += offset[1]
nx.draw_networkx_labels(G, pos, labels=nodes_to_labels(G))
# Show
#plt.draw()
# Alternatively, save as file
plt.savefig("plot-netflows.png")
# === Network definition ===
# Methods:
#G.add_node('a', demand = -5) # demand: balance
#G.add_edge('a', 'b', weight = 3, capacity = 4) # Creates edge with 0/x/capacity, weight
def example_graph():
G = nx.DiGraph()
G.add_node('s', demand=-5)
G.add_edge('s','a', capacity=3.0)
G.add_edge('s','b', capacity=1.0)
G.add_edge('a','c', capacity=3.0)
G.add_edge('b','c', capacity=5.0)
G.add_edge('b','d', capacity=4.0)
G.add_edge('d','e', capacity=2.0)
G.add_edge('c','t', capacity=2.0)
G.add_edge('e','t', capacity=3.0)
return G
# === Network drawing ===
# Write this in IPython (without the #)
#%matplotlib inline
#draw_graph(G)
# Note: matplotlib draws bolded stubs instead of arrow heads.
# === Solve ===
# --- Max flow ---
#flow_value, flow_dict = nx.maximum_flow(G, 's', 't')
#print("Max flow value: %g" % flow_value)
#print("Max flow solution: %s" % str(flow_dict))
# Alternatively, use Ford-Fulkerson alg:
#flow_dict = nx.ford_fulkerson_flow(G, 's', 't')
#flow_value = nx.cost_of_flow(G, flow_dict)
# --- Min cost flow ---
#flow_dict = nx.min_cost_flow(G)
#flow_value = nx.cost_of_flow(G, flow_dict)
# --- Max flow min cost ---
#flow_dict = nx.max_flow_min_cost(G, 's', 't')
#flow_value = nx.cost_of_flow(G, flow_dict)
# --- Using simplex ---
#flow_value, flow_dict = nx.network_simplex(G)
# === Draw result with flow ===
#draw_graph(G, flow_dict)
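# === Putting it together ===
# Illustrative run (uncomment to try): solve max flow on the example network
# and render the solution to plot-netflows.png.
#G = example_graph()
#flow_value, flow_dict = nx.maximum_flow(G, 's', 't')
#draw_graph(G, flow_dict)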
| PeterSR/DM554 | LP/netflows.py | Python | mit | 2,816 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Base class for QryForms
"""
__docformat__ = 'restructuredtext en'
### IMPORTS ###
from django.forms.util import ErrorList
from django import forms
from relais.dev import mountpoint, enum
from relais.dev.common import *
from relais.webviz.html.simpletag import *
import formrender
__all__ = [
'BaseForm',
]
### CONSTANTS & DEFINES ###
DATA_TYPE = enum.Enum (
'SAMPLE_LIST',
'SAMPLE_GROUPS',
'PHYLOGENY_INSTANCE',
'BIOSEQ_LIST',
)
### IMPLEMENTATION ###
class SafeErrorList (ErrorList):
"""
A simple representation for error lists.
The basic Django error representation does a call that eventually relies
upon the Django configuration machinery. This removes that call and thus
allows up to use the Django forms outside of Django.
"""
def __unicode__ (self):
return self.as_divs()
def as_divs (self):
if not self:
return u''
return u'<div class="errorlist">%s</div>' % ''.join ([u'<div class="error">%s</div>' % e._proxy____args for e in self])
class BaseForm (object):
"""
A class for describing forms for rendering and execution.
A common problem (in the development of Relais) has been the creation of
forms. Too often web frameworks have their own peculiar form frameworks,
which means that forms must be rewritten for every framework in which they
are used. Furthermore, some of these (and more general frameworks) are
found wanting, often at a late stage of development.
The webviz forms are an attempt to get around these problems. They use the
Django (newforms) framework to describe forms internally while providing an
    abstract outer API that should be adaptable to most scenarios. It should
be easy if need be to mutate the Django forms or replace them with another
form description framework.
    The methods for form rendering, execution and return of results
    can be called separately or together.
"""
# all these should be overriden by derived classes
id = __module__ + '.BaseForm'.lower()
label='Base form'
description ="This is a description of base form."
submits = ['submit']
output_type = 'This is the sort of results we expect'
resources = []
def __init__ (self, request={}, context=None, action=None):
"""
C'tor for class, feeding data to form and validating it.
In our scheme, a form is created in preparation for rendering,
validation of input and execution.
"""
self._formdef = self.create_form (request, context)
self.valid_data = {}
self.action = action
self.output_data = None
self.output_msgs = []
MSG ("argh")
class FormDef (forms.Form):
pass
@classmethod
def required_resources (cls):
"""
What external resources should be loaded in order to render the form?
"""
return cls.resources
def create_form (self, request, context):
"""
Factory method for creating the internal form definition.
Can be overridden to allow the creation of dynamic forms, with variable
fields or vocabularies.
"""
return self.FormDef (request, error_class=SafeErrorList)
def render_body (self):
"""
Render the body (fields) of the form in HTML, excluding submit buttons.
"""
return formrender.form_as_table (self._formdef)
def render_form (self):
"""
Render the entire form in HTML, including submit buttons and form tags.
"""
text = u''
text += start_tag ('form', method="post",
enctype="multipart/form-data") + '\n'
text += self.render_body ()
text += self.render_submits()
text += stop_tag ('form') + '\n'
return text
def render_submits (self):
"""
Render the submit buttons of a form in HTML.
"""
text = u''
text += start_tag ('div', class_="formControls") + '\n'
text += start_tag ('input', type_="hidden", name="_form_id",
value=self.id) + '\n'
text += start_tag ('input', type_="hidden", name="form.submitted",
value="1") + '\n'
for btn in self.submits:
text += start_tag ('input', class_="context", type_="submit",
name="form_submit", value=btn) + '\n'
text += stop_tag ('div') + '\n'
return text
def render_output (self):
"""
Render the results of a form execution.
"""
return self.render_output_msgs() + self.render_output_data()
def render_output_msgs (self):
return u"<P>executed</P>"
def render_output_data (self):
return u"output"
def is_valid (self):
return self._formdef.is_valid()
def execute (self, context):
"""
Do the actual operation.
"""
## Preconditions:
assert (self.is_valid())
        MSG (context=context, ctype=type(context))
## Main:
        submit_button = self._formdef.data.get ('submit')
if (submit_button):
execute_fxn_name = 'execute_' + submit_button.lower()
if (hasattr (self, execute_fxn_name)):
execute_fxn = getattr (self, execute_fxn_name)
return execute_fxn (context)
return self.execute_default (context)
def execute_default (self, context):
self.output_msgs.append ('foo')
return "no operation here"
### TEST & DEBUG ###
def _doctest ():
import doctest
doctest.testmod ()
### MAIN ###
if __name__ == '__main__':
_doctest()
### END ######################################################################
| agapow/relais.webviz | relais/webviz/forms/baseform.py | Python | mit | 5,186 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multi worker Collective Operations."""
import copy
import os
import threading
import time
from absl.testing import parameterized
from tensorflow.core.protobuf import tensorflow_server_pb2
from tensorflow.python.distribute import cluster_resolver as cluster_resolver_lib
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import multi_process_runner
from tensorflow.python.distribute import multi_worker_test_base
from tensorflow.python.eager import context
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import collective_ops
def enable_collective_ops(cluster_resolver):
context.context().configure_collective_ops(
collective_leader="/job:worker/replica:0/task:0")
config_proto = copy.deepcopy(context.context().config)
server_def = tensorflow_server_pb2.ServerDef(
cluster=cluster_resolver.cluster_spec().as_cluster_def(),
default_session_config=config_proto,
job_name=cluster_resolver.task_type,
task_index=cluster_resolver.task_id,
protocol=cluster_resolver.rpc_layer or "grpc")
context.context().enable_collective_ops(server_def)
def enable_collective_ops_with_barrier(cluster_resolver):
multi_process_runner.get_barrier().wait()
enable_collective_ops(cluster_resolver)
multi_process_runner.get_barrier().wait()
device_combination = (
combinations.combine(device="CPU", communication="RING", required_gpus=0) +
combinations.combine(
device="GPU", communication=["RING", "NCCL"], required_gpus=1))
class CollectiveOpTest(test.TestCase):
def testCheckHealth(self):
def worker_fn():
enable_collective_ops(cluster_resolver_lib.TFConfigClusterResolver())
# There may be some delays before the server startup. Check health should
# eventually be OK.
while True:
try:
for task in [
"/job:worker/replica:0/task:0",
"/job:worker/replica:0/task:1",
]:
context.context().check_collective_ops_peer_health(
task, timeout_in_ms=1000)
except (errors.UnavailableError, errors.DeadlineExceededError):
continue
break
multi_process_runner.get_barrier().wait()
cluster_spec = multi_worker_test_base.create_cluster_spec(num_workers=2)
mpr = multi_process_runner.MultiProcessRunner(worker_fn, cluster_spec)
mpr.start()
mpr.join()
def testCheckHealthPeerDown(self):
def worker_fn():
enable_collective_ops(cluster_resolver_lib.TFConfigClusterResolver())
context.context().check_collective_ops_peer_health(
"/job:worker/replica:0/task:1", timeout_in_ms=1000)
cluster_spec = multi_worker_test_base.create_cluster_spec(num_workers=2)
mpr = multi_process_runner.MultiProcessRunner(worker_fn, cluster_spec)
mpr.start_single_process("worker", 0)
with self.assertRaises(
(errors.UnavailableError, errors.DeadlineExceededError)):
mpr.join()
def testCheckHealthPeerRestart(self):
def worker_fn():
cluster_resolver = cluster_resolver_lib.TFConfigClusterResolver()
enable_collective_ops(cluster_resolver)
collective_ops.all_reduce(
constant_op.constant(1.),
group_size=2,
group_key=100,
instance_key=100,
merge_op="Add",
final_op="Id",
communication_hint="ring")
if cluster_resolver.task_type == "worker":
# MultiProcessRunner will auto restart worker-0.
os._exit(1) # pylint: disable=protected-access
else:
# chief should eventually gets FailedPreconditionError after worker-0
# has restarted.
while True:
time.sleep(1)
try:
context.context().check_collective_ops_peer_health(
"/job:worker/replica:0/task:0", timeout_in_ms=1000)
except errors.UnavailableError:
pass
except errors.FailedPreconditionError:
break
cluster_spec = multi_worker_test_base.create_cluster_spec(
has_chief=True, num_workers=1)
mpr = multi_process_runner.MultiProcessRunner(
worker_fn, cluster_spec, auto_restart=True)
mpr.start()
mpr.join()
def testCheckHealthInvalidPeer(self):
def worker_fn():
enable_collective_ops(cluster_resolver_lib.TFConfigClusterResolver())
context.context().check_collective_ops_peer_health(
"localhost:12345", timeout_in_ms=1000)
cluster_spec = multi_worker_test_base.create_cluster_spec(num_workers=2)
mpr = multi_process_runner.MultiProcessRunner(worker_fn, cluster_spec)
mpr.start_single_process("worker", 0)
with self.assertRaises(errors.InvalidArgumentError):
mpr.join()
two_worker_pool_runner = multi_process_runner.MultiProcessPoolRunner(
multi_worker_test_base.create_cluster_spec(num_workers=2),
initializer=lambda: enable_collective_ops(cluster_resolver_lib.
TFConfigClusterResolver()))
@combinations.generate(
combinations.times(
combinations.combine(
mode="eager", num_workers=2, runner=two_worker_pool_runner),
device_combination))
class AbortCollectiveOpsTest(test.TestCase, parameterized.TestCase):
def testAbortCommunication(self, device, communication):
if communication == "NCCL":
self.skipTest("b/171358086: cannot test multi worker NCCL")
dev0 = "/device:%s:0" % device
cluster_resolver = cluster_resolver_lib.TFConfigClusterResolver()
enable_collective_ops_with_barrier(cluster_resolver)
group_size = 2
group_key = 100
instance_key = 100
in_tensor = constant_op.constant([1.])
# First perform a normal all-reduce to complete the group and instance
# resolution.
with ops.device(dev0):
collective_ops.all_reduce(
in_tensor,
group_size,
group_key,
instance_key,
communication_hint=communication)
if cluster_resolver.task_id == 1:
def abort_fn():
time.sleep(2)
context.context().abort_collective_ops(errors.UNAVAILABLE, "peer down")
t = threading.Thread(target=abort_fn)
t.start()
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
with ops.device(dev0):
collective_ops.all_reduce(
in_tensor,
group_size,
group_key,
instance_key,
communication_hint=communication)
# After abortion, subsequent collectives should fail immediately.
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
with ops.device(dev0):
collective_ops.all_reduce(
in_tensor,
group_size,
group_key,
instance_key,
communication_hint=communication)
t.join()
# Enable collective ops again in order to reset the collective executor.
enable_collective_ops_with_barrier(cluster_resolver)
with ops.device(dev0):
collective_ops.all_reduce(
in_tensor,
group_size,
group_key,
instance_key,
communication_hint=communication)
def testAbortGroupParamsResolution(self, device, communication):
if communication == "NCCL":
self.skipTest("b/171358086: cannot test multi worker NCCL")
dev0 = "/device:%s:0" % device
cluster_resolver = cluster_resolver_lib.TFConfigClusterResolver()
enable_collective_ops_with_barrier(cluster_resolver)
group_size = 2
group_key = 100
instance_key = 100
in_tensor = constant_op.constant([1.])
if cluster_resolver.task_id == 1:
def abort_fn():
time.sleep(2)
context.context().abort_collective_ops(errors.UNAVAILABLE, "peer down")
t = threading.Thread(target=abort_fn)
t.start()
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
# This hangs on params resolution since we're only launching one
# collective for a group size of 2.
with ops.device(dev0):
collective_ops.all_reduce(in_tensor, group_size, group_key,
instance_key)
# After abortion, subsequent collectives should fail immediately.
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
with ops.device(dev0):
collective_ops.all_reduce(in_tensor, group_size, group_key,
instance_key)
t.join()
# Enable collective ops again in order to reset the collective executor.
enable_collective_ops_with_barrier(cluster_resolver)
with ops.device(dev0):
collective_ops.all_reduce(in_tensor, group_size, group_key, instance_key)
def testAbortInstanceParamsResolution(self, device, communication):
if communication == "NCCL":
self.skipTest("b/171358086: cannot test multi worker NCCL")
dev0 = "/device:%s:0" % device
cluster_resolver = cluster_resolver_lib.TFConfigClusterResolver()
enable_collective_ops_with_barrier(cluster_resolver)
group_size = 2
group_key = 100
instance_key = 100
in_tensor = constant_op.constant([1.])
# First perform a normal all-reduce to complete the group resolution.
with ops.device(dev0):
collective_ops.all_reduce(in_tensor, group_size, group_key, instance_key)
# We use broadcast to test aborting instance resolution since only broadcast
# waits for the group.
if cluster_resolver.task_id == 1:
def abort_fn():
time.sleep(2)
context.context().abort_collective_ops(errors.UNAVAILABLE, "peer down")
t = threading.Thread(target=abort_fn)
t.start()
# Use a different instance key to trigger another instance resolution.
instance_key = 101
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
# This hangs on params resolution since we're only launching one
# collective for a group size of 2.
with ops.device(dev0):
collective_ops.broadcast_send(in_tensor, (1,), dtypes.float32,
group_size, group_key, instance_key)
# After abortion, subsequent collectives should fail immediately.
with self.assertRaisesRegex(errors.UnavailableError, "peer down"):
with ops.device(dev0):
collective_ops.broadcast_send(in_tensor, (1,), dtypes.float32,
group_size, group_key, instance_key)
t.join()
# Enable collective ops again in order to reset the collective executor.
enable_collective_ops_with_barrier(cluster_resolver)
# Reassign instance_key so that it's the same on each worker.
instance_key = 100
with ops.device(dev0):
if cluster_resolver.task_id == 0:
collective_ops.broadcast_send(in_tensor, (1,), dtypes.float32,
group_size, group_key, instance_key)
else:
collective_ops.broadcast_recv((1,), dtypes.float32, group_size,
group_key, instance_key)
if __name__ == "__main__":
multi_process_runner.test_main()
| tensorflow/tensorflow | tensorflow/python/kernel_tests/collective_ops_multi_worker_test.py | Python | apache-2.0 | 12,067 |
from __future__ import unicode_literals
from alliance_auth.celeryapp import app
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from notifications import notify
from .manager import IPBoardManager
from .models import IpboardUser
import logging
logger = logging.getLogger(__name__)
class IpboardTasks:
def __init__(self):
pass
@classmethod
def delete_user(cls, user, notify_user=False):
if cls.has_account(user):
if IPBoardManager.disable_user(user.ipboard.username):
user.ipboard.delete()
if notify_user:
notify(user, 'IPBoard Account Disabled', level='danger')
return True
return False
@staticmethod
def has_account(user):
try:
return user.ipboard.username != ''
except ObjectDoesNotExist:
return False
@staticmethod
@app.task(bind=True, name='ipboard.update_groups')
def update_groups(self, pk):
user = User.objects.get(pk=pk)
logger.debug("Updating user %s ipboard groups." % user)
groups = []
for group in user.groups.all():
groups.append(str(group.name))
if len(groups) == 0:
groups.append('empty')
logger.debug("Updating user %s ipboard groups to %s" % (user, groups))
try:
IPBoardManager.update_groups(user.ipboard.username, groups)
        except Exception:
logger.exception("IPBoard group sync failed for %s, retrying in 10 mins" % user)
raise self.retry(countdown=60 * 10)
logger.debug("Updated user %s ipboard groups." % user)
@staticmethod
@app.task(name='ipboard.update_all_groups')
def update_all_groups():
logger.debug("Updating ALL ipboard groups")
for ipboard_user in IpboardUser.objects.exclude(username__exact=''):
IpboardTasks.update_groups.delay(ipboard_user.user.pk)
@staticmethod
def disable():
logger.debug("Deleting all Ipboard Users")
IpboardUser.objects.all().delete()
| iAddz/allianceauth | services/modules/ipboard/tasks.py | Python | gpl-2.0 | 2,144 |
# -*- coding: utf-8 -*-
from pyload.plugin.internal.DeadCrypter import DeadCrypter
class NetfolderIn(DeadCrypter):
__name = "NetfolderIn"
__type = "crypter"
__version = "0.73"
__pattern = r'http://(?:www\.)?netfolder\.(in|me)/(folder\.php\?folder_id=)?(?P<ID>\w+)(?(1)|/\w+)'
__config = [] #@TODO: Remove in 0.4.10
__description = """NetFolder.in decrypter plugin"""
__license = "GPLv3"
__authors = [("RaNaN", "[email protected]"),
("fragonib", "fragonib[AT]yahoo[DOT]es")]
| ardi69/pyload-0.4.10 | pyload/plugin/crypter/NetfolderIn.py | Python | gpl-3.0 | 550 |
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.volume.v2 import scheduler_stats_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestSchedulerStatsClient(base.BaseServiceTest):
FAKE_POOLS_LIST = {
"pools": [
{
"name": "pool1",
"capabilities": {
"updated": "2014-10-28T00:00:00-00:00",
"total_capacity": 1024,
"free_capacity": 100,
"volume_backend_name": "pool1",
"reserved_percentage": 0,
"driver_version": "1.0.0",
"storage_protocol": "iSCSI",
"QoS_support": False
}
},
{
"name": "pool2",
"capabilities": {
"updated": "2014-10-28T00:00:00-00:00",
"total_capacity": 512,
"free_capacity": 200,
"volume_backend_name": "pool2",
"reserved_percentage": 0,
"driver_version": "1.0.2",
"storage_protocol": "iSER",
"QoS_support": True
}
}
]
}
def setUp(self):
super(TestSchedulerStatsClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = scheduler_stats_client.SchedulerStatsClient(
fake_auth, 'volume', 'regionOne')
def _test_list_pools(self, bytes_body=False, detail=False):
        if detail:
            resp_body = self.FAKE_POOLS_LIST
        else:
            resp_body = {'pools': [{'name': pool['name']}
                                   for pool in self.FAKE_POOLS_LIST['pools']]}
self.check_service_client_function(
self.client.list_pools,
'tempest.lib.common.rest_client.RestClient.get',
resp_body,
bytes_body,
detail=detail)
def test_list_pools_with_str_body(self):
self._test_list_pools()
def test_list_pools_with_str_body_and_detail(self):
self._test_list_pools(detail=True)
def test_list_pools_with_bytes_body(self):
self._test_list_pools(bytes_body=True)
def test_list_pools_with_bytes_body_and_detail(self):
self._test_list_pools(bytes_body=True, detail=True)
| vedujoshi/tempest | tempest/tests/lib/services/volume/v2/test_scheduler_stats_client.py | Python | apache-2.0 | 3,040 |
from django.db import models
def content_file_name(instance, filename):
    # Django passes the model instance (not the request) to upload_to
    # callables, so take the directory from the instance's dirPath field.
    return '/'.join([instance.dirPath, filename])
class Document(models.Model):
dirPath = models.CharField(max_length=1000, default="abc")
docfile = models.FileField(upload_to=content_file_name)
| draperlaboratory/stout | uploads/models.py | Python | apache-2.0 | 274 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cross-language tests for the DeterministicAead primitive."""
from typing import Iterable
from absl.testing import absltest
from absl.testing import parameterized
import tink
from tink import daead
from tink.testing import keyset_builder
from util import supported_key_types
from util import testing_servers
SUPPORTED_LANGUAGES = testing_servers.SUPPORTED_LANGUAGES_BY_PRIMITIVE['daead']
def setUpModule():
daead.register()
testing_servers.start('deterministic_aead')
def tearDownModule():
testing_servers.stop()
def all_deterministic_aead_key_template_names() -> Iterable[str]:
"""Yields all Deterministic AEAD key template names."""
for key_type in supported_key_types.DAEAD_KEY_TYPES:
for key_template_name in supported_key_types.KEY_TEMPLATE_NAMES[key_type]:
yield key_template_name
class DeterministicAeadTest(parameterized.TestCase):
@parameterized.parameters(all_deterministic_aead_key_template_names())
def test_encrypt_decrypt(self, key_template_name):
supported_langs = supported_key_types.SUPPORTED_LANGUAGES_BY_TEMPLATE_NAME[
key_template_name]
self.assertNotEmpty(supported_langs)
key_template = supported_key_types.KEY_TEMPLATE[key_template_name]
# Take the first supported language to generate the keyset.
keyset = testing_servers.new_keyset(supported_langs[0], key_template)
supported_daeads = [
testing_servers.deterministic_aead(lang, keyset)
for lang in supported_langs
]
self.assertNotEmpty(supported_daeads)
unsupported_daeads = [
testing_servers.deterministic_aead(lang, keyset)
for lang in SUPPORTED_LANGUAGES
if lang not in supported_langs
]
plaintext = (
b'This is some plaintext message to be encrypted using '
b'key_template %s.' % key_template_name.encode('utf8'))
associated_data = (
b'Some associated data for %s.' % key_template_name.encode('utf8'))
ciphertext = None
for p in supported_daeads:
if ciphertext:
self.assertEqual(
ciphertext,
p.encrypt_deterministically(plaintext, associated_data))
else:
ciphertext = p.encrypt_deterministically(plaintext, associated_data)
for p2 in supported_daeads:
output = p2.decrypt_deterministically(ciphertext, associated_data)
self.assertEqual(output, plaintext)
for p2 in unsupported_daeads:
with self.assertRaises(
tink.TinkError,
msg='Language %s supports decrypt_deterministically with %s '
'unexpectedly' % (p2.lang, key_template_name)):
p2.decrypt_deterministically(ciphertext, associated_data)
for p in unsupported_daeads:
with self.assertRaises(
tink.TinkError,
msg='Language %s supports encrypt_deterministically with %s '
'unexpectedly' % (p.lang, key_template_name)):
p.encrypt_deterministically(b'plaintext', b'associated_data')
# If the implementations work fine for keysets with single keys, then key
# rotation should work if the primitive wrapper is implemented correctly.
# These wrappers do not depend on the key type, so it should be fine to always
# test with the same key type. But since the wrapper needs to treat keys
# with output prefix RAW differently, we also include such a template for that.
KEY_ROTATION_TEMPLATES = [
daead.deterministic_aead_key_templates.AES256_SIV,
keyset_builder.raw_template(
daead.deterministic_aead_key_templates.AES256_SIV)
]
def key_rotation_test_cases():
for enc_lang in SUPPORTED_LANGUAGES:
for dec_lang in SUPPORTED_LANGUAGES:
for old_key_tmpl in KEY_ROTATION_TEMPLATES:
for new_key_tmpl in KEY_ROTATION_TEMPLATES:
yield (enc_lang, dec_lang, old_key_tmpl, new_key_tmpl)
class DaeadKeyRotationTest(parameterized.TestCase):
@parameterized.parameters(key_rotation_test_cases())
def test_key_rotation(self, enc_lang, dec_lang, old_key_tmpl, new_key_tmpl):
# Do a key rotation from an old key generated from old_key_tmpl to a new
# key generated from new_key_tmpl. Encryption and decryption are done
# in languages enc_lang and dec_lang.
builder = keyset_builder.new_keyset_builder()
older_key_id = builder.add_new_key(old_key_tmpl)
builder.set_primary_key(older_key_id)
enc_daead1 = testing_servers.deterministic_aead(enc_lang, builder.keyset())
dec_daead1 = testing_servers.deterministic_aead(dec_lang, builder.keyset())
newer_key_id = builder.add_new_key(new_key_tmpl)
enc_daead2 = testing_servers.deterministic_aead(enc_lang, builder.keyset())
dec_daead2 = testing_servers.deterministic_aead(dec_lang, builder.keyset())
builder.set_primary_key(newer_key_id)
enc_daead3 = testing_servers.deterministic_aead(enc_lang, builder.keyset())
dec_daead3 = testing_servers.deterministic_aead(dec_lang, builder.keyset())
builder.disable_key(older_key_id)
enc_daead4 = testing_servers.deterministic_aead(enc_lang, builder.keyset())
dec_daead4 = testing_servers.deterministic_aead(dec_lang, builder.keyset())
self.assertNotEqual(older_key_id, newer_key_id)
# 1 encrypts with the older key. So 1, 2 and 3 can decrypt it, but not 4.
ciphertext1 = enc_daead1.encrypt_deterministically(b'plaintext', b'ad')
self.assertEqual(dec_daead1.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
self.assertEqual(dec_daead2.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
self.assertEqual(dec_daead3.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
with self.assertRaises(tink.TinkError):
_ = dec_daead4.decrypt_deterministically(ciphertext1, b'ad')
# 2 encrypts with the older key. So 1, 2 and 3 can decrypt it, but not 4.
ciphertext2 = enc_daead2.encrypt_deterministically(b'plaintext', b'ad')
self.assertEqual(dec_daead1.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
self.assertEqual(dec_daead2.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
self.assertEqual(dec_daead3.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
with self.assertRaises(tink.TinkError):
_ = dec_daead4.decrypt_deterministically(ciphertext2, b'ad')
# 3 encrypts with the newer key. So 2, 3 and 4 can decrypt it, but not 1.
ciphertext3 = enc_daead3.encrypt_deterministically(b'plaintext', b'ad')
with self.assertRaises(tink.TinkError):
_ = dec_daead1.decrypt_deterministically(ciphertext3, b'ad')
self.assertEqual(dec_daead2.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
self.assertEqual(dec_daead3.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
self.assertEqual(dec_daead4.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
# 4 encrypts with the newer key. So 2, 3 and 4 can decrypt it, but not 1.
ciphertext4 = enc_daead4.encrypt_deterministically(b'plaintext', b'ad')
with self.assertRaises(tink.TinkError):
_ = dec_daead1.decrypt_deterministically(ciphertext4, b'ad')
self.assertEqual(dec_daead2.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
self.assertEqual(dec_daead3.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
self.assertEqual(dec_daead4.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
if __name__ == '__main__':
absltest.main()
| google/tink | testing/cross_language/deterministic_aead_test.py | Python | apache-2.0 | 8,200 |
from __future__ import print_function
import os
from flask import Flask, render_template, url_for, request, jsonify
app = Flask(__name__)
@app.route('/')
def homepage():
viewer = 'clinician'
description = 'This is the clinician version.'
return render_template('index.html', viewer=viewer, description=description)
@app.route('/patient')
def patientpage():
viewer = 'patient'
description = 'This is the patient version.'
return render_template('index.html', viewer=viewer, description=description)
@app.route('/echo/', methods=['POST'])
def echo():
return_data = {"value": request.json['userInputs']}
return jsonify(return_data)
@app.route('/about')
def aboutpage():
viewer = 'about'
return render_template('about.html', viewer=viewer)
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5030))
app.run(host='0.0.0.0', port=port, debug=True, use_reloader=True) | daviszhou/ascvd-webapp | __init__.py | Python | mit | 898 |
from django.contrib import admin
from .models import ItemLink
class ItemLinkAdmin(admin.ModelAdmin):
search_fields = ["title", "description", "link"]
admin.site.register(ItemLink, ItemLinkAdmin)
| daviferreira/leticiastallone.com | leticiastallone/links/admin.py | Python | mit | 202 |
import os
import cPickle as pickle
home_dir = os.path.expanduser('~')
DATA_DIR = os.path.join(home_dir, '.tax_resolve')
if not os.path.exists(DATA_DIR):
    try:
        os.mkdir(DATA_DIR)
    except OSError:
        # Fall back to the working directory if the cache dir can't be created.
        DATA_DIR = os.getcwd()
cache_path = lambda name: os.path.join(DATA_DIR, '%s.cache' % name)
def get_cache(name):
    # Binary mode is required for pickled data.
    return pickle.load(open(cache_path(name), 'rb'))

def save_cache(obj, name):
    pickle.dump(obj, open(cache_path(name), 'wb'), protocol=-1)
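# Illustrative round trip (hypothetical cache name):
#
#     save_cache({'Homo sapiens': 9606}, 'taxids')
#     taxids = get_cache('taxids')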
| bendmorris/tax_resolve | caching.py | Python | mit | 464 |
from kentauros.context import KtrContext
from kentauros.package import KtrPackage
from kentauros.result import KtrResult
from .module import KtrModule
class PackageModule(KtrModule):
NAME = "Package"
def __init__(self, package: KtrPackage, context: KtrContext):
super().__init__(package, context)
self.actions["chain"] = self.execute
def name(self):
return "{} {}".format(self.NAME, self.package.name)
def __str__(self) -> str:
return "{} '{}'".format(self.NAME, self.package.name)
def execute(self) -> KtrResult:
return KtrResult(True)
def clean(self) -> KtrResult:
return KtrResult(True)
def imports(self) -> KtrResult:
return KtrResult(True)
def status(self) -> KtrResult:
return KtrResult(True)
def status_string(self) -> KtrResult:
ret = KtrResult()
res = self.package.status_string()
ret.collect(res)
ret.value = res.value
return ret.submit(res.success)
def verify(self) -> KtrResult:
ret = KtrResult()
res = self.package.verify()
ret.collect(res)
return ret.submit(res.success)
| decathorpe/kentauros | kentauros/modules/package.py | Python | gpl-2.0 | 1,177 |
"""
The MIT License
Copyright (c) 2007 Leah Culver
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import cgi
import urllib
import time
import random
import urlparse
import hmac
import binascii
from xml.dom.minidom import parseString
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
USER_AGENT = 'BarrioSquare/0.1.10 (Maemo5; Linux arm7vl) (http://chilitechno.com/fster/)'
class OAuthError(RuntimeError):
"""Generic exception class."""
    def __init__(self, message='OAuth error occurred.'):
self.message = message
def build_authenticate_header(realm=''):
"""Optional WWW-Authenticate header (401 error)"""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def escape(s):
"""Escape a URL including any /."""
return urllib.quote(s, safe='~')
def _utf8_str(s):
"""Convert unicode to utf-8."""
if isinstance(s, unicode):
return s.encode("utf-8")
else:
return str(s)
def generate_timestamp():
"""Get seconds since epoch (UTC)."""
return int(time.time())
def generate_nonce(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
def generate_verifier(length=8):
"""Generate pseudorandom number."""
return ''.join([str(random.randint(0, 9)) for i in range(length)])
class OAuthConsumer(object):
"""Consumer of OAuth authentication.
OAuthConsumer is a data type that represents the identity of the Consumer
via its shared secret with the Service Provider.
"""
key = None
secret = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
class OAuthToken(object):
"""OAuthToken is a data type that represents an End User via either an access
or request token.
key -- the token
secret -- the token secret
"""
key = None
secret = None
callback = None
callback_confirmed = None
verifier = None
def __init__(self, key, secret):
self.key = key
self.secret = secret
def set_callback(self, callback):
self.callback = callback
self.callback_confirmed = 'true'
def set_verifier(self, verifier=None):
if verifier is not None:
self.verifier = verifier
else:
self.verifier = generate_verifier()
def get_callback_url(self):
if self.callback and self.verifier:
# Append the oauth_verifier.
parts = urlparse.urlparse(self.callback)
scheme, netloc, path, params, query, fragment = parts[:6]
if query:
query = '%s&oauth_verifier=%s' % (query, self.verifier)
else:
query = 'oauth_verifier=%s' % self.verifier
return urlparse.urlunparse((scheme, netloc, path, params,
query, fragment))
return self.callback
def to_string(self):
data = {
'oauth_token': self.key,
'oauth_token_secret': self.secret,
}
if self.callback_confirmed is not None:
data['oauth_callback_confirmed'] = self.callback_confirmed
return urllib.urlencode(data)
    def from_string(s):
        """ Returns a token parsed from an XML response containing
            <oauth_token> and <oauth_token_secret> elements.
        """
        dom3 = parseString(s)
        key = None
        try:
            key = dom3.getElementsByTagName('oauth_token')[0].firstChild.data
        except IndexError:
            key = None
        secret = None
        try:
            secret = dom3.getElementsByTagName('oauth_token_secret')[0].firstChild.data
        except IndexError:
            secret = None
        if key is not None and secret is not None:
token = OAuthToken(key, secret)
else:
token = None
#try:
# token.callback_confirmed = params['oauth_callback_confirmed'][0]
#except KeyError:
# pass # 1.0, no callback confirmed.
return token
from_string = staticmethod(from_string)
def __str__(self):
return self.to_string()
class OAuthRequest(object):
"""OAuthRequest represents the request and can be serialized.
OAuth parameters:
- oauth_consumer_key
- oauth_token
- oauth_signature_method
- oauth_signature
- oauth_timestamp
- oauth_nonce
- oauth_version
- oauth_verifier
... any additional parameters, as defined by the Service Provider.
"""
parameters = None # OAuth parameters.
http_method = HTTP_METHOD
http_url = None
version = VERSION
def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
self.http_method = http_method
self.http_url = http_url
self.parameters = parameters or {}
def set_parameter(self, parameter, value):
self.parameters[parameter] = value
    def get_parameter(self, parameter):
        try:
            return self.parameters[parameter]
        except KeyError:
            raise OAuthError('Parameter not found: %s' % parameter)
def _get_timestamp_nonce(self):
return self.get_parameter('oauth_timestamp'), self.get_parameter(
'oauth_nonce')
def get_nonoauth_parameters(self):
"""Get any non-OAuth parameters."""
parameters = {}
for k, v in self.parameters.iteritems():
# Ignore oauth parameters.
if k.find('oauth_') < 0:
parameters[k] = v
return parameters
def to_header(self, realm=''):
"""Serialize as a header for an HTTPAuth request."""
auth_header = 'OAuth realm="%s"' % realm
# Add the oauth parameters.
if self.parameters:
for k, v in self.parameters.iteritems():
if k[:6] == 'oauth_':
auth_header += ', %s="%s"' % (k, escape(str(v)))
if k[:3] == 'fs_':
auth_header += ', %s="%s"' % (k, escape(str(v)))
if k[:3] == 'geo':
auth_header += ', %s="%s"' % (k, escape(str(v)))
return {'Authorization': auth_header, 'User-Agent': USER_AGENT}
def to_postdata(self):
"""Serialize as post data for a POST request."""
return '&'.join(['%s=%s' % (escape(str(k)), escape(str(v))) \
for k, v in self.parameters.iteritems()])
def to_url(self):
"""Serialize as a URL for a GET request."""
return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())
def get_normalized_parameters(self):
"""Return a string that contains the parameters that must be signed."""
params = self.parameters
        try:
            # Exclude the signature if it exists.
            del params['oauth_signature']
        except KeyError:
            pass
# Escape key values before sorting.
key_values = [(escape(_utf8_str(k)), escape(_utf8_str(v))) \
for k,v in params.items()]
# Sort lexicographically, first after key, then after value.
key_values.sort()
# Combine key value pairs into a string.
return '&'.join(['%s=%s' % (k, v) for k, v in key_values])
def get_normalized_http_method(self):
"""Uppercases the http method."""
return self.http_method.upper()
def get_normalized_http_url(self):
"""Parses the URL and rebuilds it to be scheme://host/path."""
parts = urlparse.urlparse(self.http_url)
scheme, netloc, path = parts[:3]
# Exclude default port numbers.
if scheme == 'http' and netloc[-3:] == ':80':
netloc = netloc[:-3]
elif scheme == 'https' and netloc[-4:] == ':443':
netloc = netloc[:-4]
return '%s://%s%s' % (scheme, netloc, path)
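    # Illustrative examples of the normalization above (not part of the
    # original source): only default ports are stripped, the query is dropped:
    #   'http://example.com:80/photos?x=1' -> 'http://example.com/photos'
    #   'https://example.com:8443/a'       -> 'https://example.com:8443/a'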
def sign_request(self, signature_method, consumer, token):
"""Set the signature parameter to the result of build_signature."""
# Set the signature method.
self.set_parameter('oauth_signature_method',
signature_method.get_name())
# Set the signature.
self.set_parameter('oauth_signature',
self.build_signature(signature_method, consumer, token))
def build_signature(self, signature_method, consumer, token):
"""Calls the build signature method within the signature method."""
return signature_method.build_signature(self, consumer, token)
def from_request(http_method, http_url, headers=None, parameters=None,
query_string=None):
"""Combines multiple parameter sources."""
if parameters is None:
parameters = {}
# Headers
if headers and 'Authorization' in headers:
auth_header = headers['Authorization']
# Check that the authorization header is OAuth.
if auth_header[:6] == 'OAuth ':
auth_header = auth_header[6:]
try:
# Get the parameters from the header.
header_params = OAuthRequest._split_header(auth_header)
parameters.update(header_params)
except:
raise OAuthError('Unable to parse OAuth parameters from '
'Authorization header.')
# GET or POST query string.
if query_string:
query_params = OAuthRequest._split_url_string(query_string)
parameters.update(query_params)
# URL parameters.
param_str = urlparse.urlparse(http_url)[4] # query
url_params = OAuthRequest._split_url_string(param_str)
parameters.update(url_params)
if parameters:
return OAuthRequest(http_method, http_url, parameters)
return None
from_request = staticmethod(from_request)
def from_consumer_and_token(oauth_consumer, token=None,
callback=None, verifier=None, http_method=HTTP_METHOD,
http_url=None, parameters=None):
if not parameters:
parameters = {}
defaults = {
'oauth_consumer_key': oauth_consumer.key,
'oauth_timestamp': generate_timestamp(),
'oauth_nonce': generate_nonce(),
'oauth_version': OAuthRequest.version,
}
defaults.update(parameters)
parameters = defaults
if token:
parameters['oauth_token'] = token.key
if token.callback:
parameters['oauth_callback'] = token.callback
# 1.0a support for verifier.
if verifier:
parameters['oauth_verifier'] = verifier
elif callback:
# 1.0a support for callback in the request token request.
parameters['oauth_callback'] = callback
return OAuthRequest(http_method, http_url, parameters)
from_consumer_and_token = staticmethod(from_consumer_and_token)
def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD,
http_url=None, parameters=None):
if not parameters:
parameters = {}
parameters['oauth_token'] = token.key
if callback:
parameters['oauth_callback'] = callback
return OAuthRequest(http_method, http_url, parameters)
from_token_and_callback = staticmethod(from_token_and_callback)
def _split_header(header):
"""Turn Authorization: header into parameters."""
params = {}
parts = header.split(',')
for param in parts:
# Ignore realm parameter.
if param.find('realm') > -1:
continue
# Remove whitespace.
param = param.strip()
# Split key-value.
param_parts = param.split('=', 1)
# Remove quotes and unescape the value.
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
return params
_split_header = staticmethod(_split_header)
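    # For example (illustrative), once from_request() has stripped the
    # leading 'OAuth ' prefix:
    #   'realm="x", oauth_token="abc%20d"' -> {'oauth_token': 'abc d'}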
def _split_url_string(param_str):
"""Turn URL string into parameters."""
parameters = cgi.parse_qs(param_str, keep_blank_values=False)
for k, v in parameters.iteritems():
parameters[k] = urllib.unquote(v[0])
return parameters
_split_url_string = staticmethod(_split_url_string)
class OAuthServer(object):
"""A worker to check the validity of a request against a data store."""
timestamp_threshold = 300 # In seconds, five minutes.
version = VERSION
signature_methods = None
data_store = None
def __init__(self, data_store=None, signature_methods=None):
self.data_store = data_store
self.signature_methods = signature_methods or {}
def set_data_store(self, data_store):
self.data_store = data_store
def get_data_store(self):
return self.data_store
def add_signature_method(self, signature_method):
self.signature_methods[signature_method.get_name()] = signature_method
return self.signature_methods
def fetch_request_token(self, oauth_request):
"""Processes a request_token request and returns the
request token on success.
"""
try:
# Get the request token for authorization.
token = self._get_token(oauth_request, 'request')
except OAuthError:
# No token required for the initial token request.
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
try:
callback = self.get_callback(oauth_request)
except OAuthError:
callback = None # 1.0, no callback specified.
self._check_signature(oauth_request, consumer, None)
# Fetch a new token.
token = self.data_store.fetch_request_token(consumer, callback)
return token
def fetch_access_token(self, oauth_request):
"""Processes an access_token request and returns the
access token on success.
"""
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
try:
verifier = self._get_verifier(oauth_request)
except OAuthError:
verifier = None
# Get the request token.
token = self._get_token(oauth_request, 'request')
self._check_signature(oauth_request, consumer, token)
new_token = self.data_store.fetch_access_token(consumer, token, verifier)
return new_token
def verify_request(self, oauth_request):
"""Verifies an api call and checks all the parameters."""
# -> consumer and token
version = self._get_version(oauth_request)
consumer = self._get_consumer(oauth_request)
# Get the access token.
token = self._get_token(oauth_request, 'access')
self._check_signature(oauth_request, consumer, token)
parameters = oauth_request.get_nonoauth_parameters()
return consumer, token, parameters
def authorize_token(self, token, user):
"""Authorize a request token."""
return self.data_store.authorize_request_token(token, user)
def get_callback(self, oauth_request):
"""Get the callback URL."""
return oauth_request.get_parameter('oauth_callback')
def build_authenticate_header(self, realm=''):
"""Optional support for the authenticate header."""
return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
def _get_version(self, oauth_request):
"""Verify the correct version request for this server."""
        try:
            version = oauth_request.get_parameter('oauth_version')
        except OAuthError:
            version = VERSION
if version and version != self.version:
raise OAuthError('OAuth version %s not supported.' % str(version))
return version
def _get_signature_method(self, oauth_request):
"""Figure out the signature with some defaults."""
        try:
            signature_method = oauth_request.get_parameter(
                'oauth_signature_method')
        except OAuthError:
            signature_method = SIGNATURE_METHOD
        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except KeyError:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported, try one of the '
                'following: %s' % (signature_method, signature_method_names))
return signature_method
def _get_consumer(self, oauth_request):
consumer_key = oauth_request.get_parameter('oauth_consumer_key')
consumer = self.data_store.lookup_consumer(consumer_key)
if not consumer:
raise OAuthError('Invalid consumer.')
return consumer
def _get_token(self, oauth_request, token_type='access'):
"""Try to find the token for the provided request token key."""
token_field = oauth_request.get_parameter('oauth_token')
token = self.data_store.lookup_token(token_type, token_field)
if not token:
raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
return token
def _get_verifier(self, oauth_request):
return oauth_request.get_parameter('oauth_verifier')
def _check_signature(self, oauth_request, consumer, token):
timestamp, nonce = oauth_request._get_timestamp_nonce()
self._check_timestamp(timestamp)
self._check_nonce(consumer, token, nonce)
signature_method = self._get_signature_method(oauth_request)
        try:
            signature = oauth_request.get_parameter('oauth_signature')
        except OAuthError:
            raise OAuthError('Missing signature.')
# Validate the signature.
valid_sig = signature_method.check_signature(oauth_request, consumer,
token, signature)
if not valid_sig:
key, base = signature_method.build_signature_base_string(
oauth_request, consumer, token)
raise OAuthError('Invalid signature. Expected signature base '
'string: %s' % base)
def _check_timestamp(self, timestamp):
"""Verify that timestamp is recentish."""
timestamp = int(timestamp)
now = int(time.time())
lapsed = abs(now - timestamp)
if lapsed > self.timestamp_threshold:
raise OAuthError('Expired timestamp: given %d and now %s has a '
'greater difference than threshold %d' %
(timestamp, now, self.timestamp_threshold))
def _check_nonce(self, consumer, token, nonce):
"""Verify that the nonce is uniqueish."""
nonce = self.data_store.lookup_nonce(consumer, token, nonce)
if nonce:
raise OAuthError('Nonce already used: %s' % str(nonce))
class OAuthClient(object):
"""OAuthClient is a worker to attempt to execute a request."""
consumer = None
token = None
def __init__(self, oauth_consumer, oauth_token):
self.consumer = oauth_consumer
self.token = oauth_token
def get_consumer(self):
return self.consumer
def get_token(self):
return self.token
def fetch_request_token(self, oauth_request):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_access_token(self, oauth_request):
"""-> OAuthToken."""
raise NotImplementedError
def access_resource(self, oauth_request):
"""-> Some protected resource."""
raise NotImplementedError
class OAuthDataStore(object):
"""A database abstraction used to lookup consumers and tokens."""
def lookup_consumer(self, key):
"""-> OAuthConsumer."""
raise NotImplementedError
def lookup_token(self, oauth_consumer, token_type, token_token):
"""-> OAuthToken."""
raise NotImplementedError
def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_request_token(self, oauth_consumer, oauth_callback):
"""-> OAuthToken."""
raise NotImplementedError
def fetch_access_token(self, oauth_consumer, oauth_token, oauth_verifier):
"""-> OAuthToken."""
raise NotImplementedError
def authorize_request_token(self, oauth_token, user):
"""-> OAuthToken."""
raise NotImplementedError
class OAuthSignatureMethod(object):
"""A strategy class that implements a signature method."""
def get_name(self):
"""-> str."""
raise NotImplementedError
def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
"""-> str key, str raw."""
raise NotImplementedError
def build_signature(self, oauth_request, oauth_consumer, oauth_token):
"""-> str."""
raise NotImplementedError
def check_signature(self, oauth_request, consumer, token, signature):
built = self.build_signature(oauth_request, consumer, token)
return built == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
def get_name(self):
return 'HMAC-SHA1'
def build_signature_base_string(self, oauth_request, consumer, token):
sig = (
escape(oauth_request.get_normalized_http_method()),
escape(oauth_request.get_normalized_http_url()),
escape(oauth_request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key, raw
def build_signature(self, oauth_request, consumer, token):
"""Builds the base signature string."""
key, raw = self.build_signature_base_string(oauth_request, consumer,
token)
# HMAC object.
try:
import hashlib # 2.5
hashed = hmac.new(key, raw, hashlib.sha1)
except:
import sha # Deprecated
hashed = hmac.new(key, raw, sha)
# Calculate the digest base 64.
return binascii.b2a_base64(hashed.digest())[:-1]
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
def get_name(self):
return 'PLAINTEXT'
def build_signature_base_string(self, oauth_request, consumer, token):
"""Concatenates the consumer key and secret."""
sig = '%s&' % escape(consumer.secret)
if token:
sig = sig + escape(token.secret)
return sig, sig
def build_signature(self, oauth_request, consumer, token):
key, raw = self.build_signature_base_string(oauth_request, consumer,
token)
return key
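# Minimal signing walk-through (illustrative sketch appended for clarity;
# the key/secret values below are made up and the request is never sent):
if __name__ == '__main__':
    demo_consumer = OAuthConsumer('my_consumer_key', 'my_consumer_secret')
    demo_token = OAuthToken('my_token', 'my_token_secret')
    demo_request = OAuthRequest.from_consumer_and_token(
        demo_consumer, token=demo_token, http_method='GET',
        http_url='http://example.com/photos', parameters={'size': 'large'})
    demo_request.sign_request(OAuthSignatureMethod_HMAC_SHA1(),
                              demo_consumer, demo_token)
    # Prints a fully signed GET URL including oauth_signature.
    print demo_request.to_url()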
| chilitechno/barrioSquare | oauth.py | Python | gpl-3.0 | 23,750 |
import enlighten
# keep a reference to the builtin before we shadow its name below
__enumerate = enumerate
def iterate(iterable):
return Progress(iterable)
def enumerate(iterable):
return __enumerate(iterate(iterable))
class Progress:
def __init__(self, iterable):
try:
total = len(iterable)
except (TypeError, AttributeError):
total = None
self.iterable = iterable
self.manager = enlighten.get_manager()
self.pbar = self.manager.counter(total=total)
def __iter__(self):
for item in self.iterable:
yield item
self.pbar.update()
self.manager.stop()
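# Minimal usage sketch (illustrative, not part of the original module):
if __name__ == "__main__":
    # enumerate() here is the module-level wrapper above, so the loop
    # renders an enlighten progress bar while yielding (index, item) pairs.
    for index, item in enumerate(["a", "b", "c"]):
        print(index, item)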
| grade-it/assigner | assigner/progress.py | Python | mit | 633 |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import remi.gui as gui
from remi import start, App
from threading import Timer
class MyApp(App):
def __init__(self, *args):
super(MyApp, self).__init__(*args)
def idle(self):
self.counter.set_text('Running Time: ' + str(self.count))
self.progress.set_value(self.count%100)
def main(self):
# the margin 0px auto centers the main container
verticalContainer = gui.Widget(width=540, margin='0px auto', style={'display': 'block', 'overflow': 'hidden'})
horizontalContainer = gui.Widget(width='100%', layout_orientation=gui.Widget.LAYOUT_HORIZONTAL, margin='0px', style={'display': 'block', 'overflow': 'auto'})
subContainerLeft = gui.Widget(width=320, style={'display': 'block', 'overflow': 'auto', 'text-align': 'center'})
self.img = gui.Image('/res:logo.png', height=100, margin='10px')
self.img.onclick.do(self.on_img_clicked)
self.table = gui.Table.new_from_list([('ID', 'First Name', 'Last Name'),
('101', 'Danny', 'Young'),
('102', 'Christine', 'Holand'),
('103', 'Lars', 'Gordon'),
('104', 'Roberto', 'Robitaille'),
('105', 'Maria', 'Papadopoulos')], width=300, height=200, margin='10px')
self.table.on_table_row_click.do(self.on_table_row_click)
# the arguments are width - height - layoutOrientationOrizontal
subContainerRight = gui.Widget(style={'width': '220px', 'display': 'block', 'overflow': 'auto', 'text-align': 'center'})
self.count = 0
self.counter = gui.Label('', width=200, height=30, margin='10px')
self.lbl = gui.Label('This is a LABEL!', width=200, height=30, margin='10px')
self.bt = gui.Button('Press me!', width=200, height=30, margin='10px')
# setting the listener for the onclick event of the button
self.bt.onclick.do(self.on_button_pressed)
self.txt = gui.TextInput(width=200, height=30, margin='10px')
self.txt.set_text('This is a TEXTAREA')
self.txt.onchange.do(self.on_text_area_change)
self.spin = gui.SpinBox(1, 0, 100, width=200, height=30, margin='10px')
self.spin.onchange.do(self.on_spin_change)
self.progress = gui.Progress(1, 100, width=200, height=5)
self.check = gui.CheckBoxLabel('Label checkbox', True, width=200, height=30, margin='10px')
self.check.onchange.do(self.on_check_change)
self.btInputDiag = gui.Button('Open InputDialog', width=200, height=30, margin='10px')
self.btInputDiag.onclick.do(self.open_input_dialog)
self.btFileDiag = gui.Button('File Selection Dialog', width=200, height=30, margin='10px')
self.btFileDiag.onclick.do(self.open_fileselection_dialog)
self.btUploadFile = gui.FileUploader('./', width=200, height=30, margin='10px')
self.btUploadFile.onsuccess.do(self.fileupload_on_success)
self.btUploadFile.onfailed.do(self.fileupload_on_failed)
items = ('Danny Young','Christine Holand','Lars Gordon','Roberto Robitaille')
self.listView = gui.ListView.new_from_list(items, width=300, height=120, margin='10px')
self.listView.onselection.do(self.list_view_on_selected)
self.link = gui.Link("http://localhost:8081", "A link to here", width=200, height=30, margin='10px')
self.dropDown = gui.DropDown.new_from_list(('DropDownItem 0', 'DropDownItem 1'),
width=200, height=20, margin='10px')
self.dropDown.onchange.do(self.drop_down_changed)
self.dropDown.select_by_value('DropDownItem 0')
self.slider = gui.Slider(10, 0, 100, 5, width=200, height=20, margin='10px')
self.slider.onchange.do(self.slider_changed)
self.colorPicker = gui.ColorPicker('#ffbb00', width=200, height=20, margin='10px')
self.colorPicker.onchange.do(self.color_picker_changed)
self.date = gui.Date('2015-04-13', width=200, height=20, margin='10px')
self.date.onchange.do(self.date_changed)
self.video = gui.Widget( _type='iframe', width=290, height=200, margin='10px')
self.video.attributes['src'] = "https://drive.google.com/file/d/0B0J9Lq_MRyn4UFRsblR3UTBZRHc/preview"
self.video.attributes['width'] = '100%'
self.video.attributes['height'] = '100%'
self.video.attributes['controls'] = 'true'
self.video.style['border'] = 'none'
self.tree = gui.TreeView(width='100%', height=300)
ti1 = gui.TreeItem("Item1")
ti2 = gui.TreeItem("Item2")
ti3 = gui.TreeItem("Item3")
subti1 = gui.TreeItem("Sub Item1")
subti2 = gui.TreeItem("Sub Item2")
subti3 = gui.TreeItem("Sub Item3")
subti4 = gui.TreeItem("Sub Item4")
subsubti1 = gui.TreeItem("Sub Sub Item1")
subsubti2 = gui.TreeItem("Sub Sub Item2")
subsubti3 = gui.TreeItem("Sub Sub Item3")
self.tree.append([ti1, ti2, ti3])
ti2.append([subti1, subti2, subti3, subti4])
subti4.append([subsubti1, subsubti2, subsubti3])
# appending a widget to another, the first argument is a string key
subContainerRight.append([self.counter, self.lbl, self.bt, self.txt, self.spin, self.progress, self.check, self.btInputDiag, self.btFileDiag])
# use a defined key as we replace this widget later
fdownloader = gui.FileDownloader('download test', '../remi/res/logo.png', width=200, height=30, margin='10px')
subContainerRight.append(fdownloader, key='file_downloader')
subContainerRight.append([self.btUploadFile, self.dropDown, self.slider, self.colorPicker, self.date, self.tree])
self.subContainerRight = subContainerRight
subContainerLeft.append([self.img, self.table, self.listView, self.link, self.video])
horizontalContainer.append([subContainerLeft, subContainerRight])
menu = gui.Menu(width='100%', height='30px')
m1 = gui.MenuItem('File', width=100, height=30)
m2 = gui.MenuItem('View', width=100, height=30)
m2.onclick.do(self.menu_view_clicked)
m11 = gui.MenuItem('Save', width=100, height=30)
m12 = gui.MenuItem('Open', width=100, height=30)
m12.onclick.do(self.menu_open_clicked)
m111 = gui.MenuItem('Save', width=100, height=30)
m111.onclick.do(self.menu_save_clicked)
m112 = gui.MenuItem('Save as', width=100, height=30)
m112.onclick.do(self.menu_saveas_clicked)
m3 = gui.MenuItem('Dialog', width=100, height=30)
m3.onclick.do(self.menu_dialog_clicked)
menu.append([m1, m2, m3])
m1.append([m11, m12])
m11.append([m111, m112])
menubar = gui.MenuBar(width='100%', height='30px')
menubar.append(menu)
verticalContainer.append([menubar, horizontalContainer])
#this flag will be used to stop the display_counter Timer
self.stop_flag = False
# kick of regular display of counter
self.display_counter()
# returning the root widget
return verticalContainer
def display_counter(self):
self.count += 1
if not self.stop_flag:
Timer(1, self.display_counter).start()
def menu_dialog_clicked(self, widget):
self.dialog = gui.GenericDialog(title='Dialog Box', message='Click Ok to transfer content to main page', width='500px')
self.dtextinput = gui.TextInput(width=200, height=30)
self.dtextinput.set_value('Initial Text')
self.dialog.add_field_with_label('dtextinput', 'Text Input', self.dtextinput)
self.dcheck = gui.CheckBox(False, width=200, height=30)
self.dialog.add_field_with_label('dcheck', 'Label Checkbox', self.dcheck)
values = ('Danny Young', 'Christine Holand', 'Lars Gordon', 'Roberto Robitaille')
self.dlistView = gui.ListView.new_from_list(values, width=200, height=120)
self.dialog.add_field_with_label('dlistView', 'Listview', self.dlistView)
self.ddropdown = gui.DropDown.new_from_list(('DropDownItem 0', 'DropDownItem 1'),
width=200, height=20)
self.dialog.add_field_with_label('ddropdown', 'Dropdown', self.ddropdown)
self.dspinbox = gui.SpinBox(min=0, max=5000, width=200, height=20)
self.dspinbox.set_value(50)
self.dialog.add_field_with_label('dspinbox', 'Spinbox', self.dspinbox)
        self.dslider = gui.Slider(10, 0, 100, 5, width=200, height=20)
        self.dialog.add_field_with_label('dslider', 'Slider', self.dslider)
self.dcolor = gui.ColorPicker(width=200, height=20)
self.dcolor.set_value('#ffff00')
self.dialog.add_field_with_label('dcolor', 'Colour Picker', self.dcolor)
self.ddate = gui.Date(width=200, height=20)
self.ddate.set_value('2000-01-01')
self.dialog.add_field_with_label('ddate', 'Date', self.ddate)
self.dialog.confirm_dialog.do(self.dialog_confirm)
self.dialog.show(self)
def dialog_confirm(self, widget):
result = self.dialog.get_field('dtextinput').get_value()
self.txt.set_value(result)
result = self.dialog.get_field('dcheck').get_value()
self.check.set_value(result)
result = self.dialog.get_field('ddropdown').get_value()
self.dropDown.select_by_value(result)
result = self.dialog.get_field('dspinbox').get_value()
self.spin.set_value(result)
result = self.dialog.get_field('dslider').get_value()
self.slider.set_value(result)
result = self.dialog.get_field('dcolor').get_value()
self.colorPicker.set_value(result)
result = self.dialog.get_field('ddate').get_value()
self.date.set_value(result)
result = self.dialog.get_field('dlistView').get_value()
self.listView.select_by_value(result)
# listener function
def on_img_clicked(self, widget):
self.lbl.set_text('Image clicked!')
def on_table_row_click(self, table, row, item):
self.lbl.set_text('Table Item clicked: ' + item.get_text())
def on_button_pressed(self, widget):
self.lbl.set_text('Button pressed! ')
self.bt.set_text('Hi!')
def on_text_area_change(self, widget, newValue):
self.lbl.set_text('Text Area value changed!')
def on_spin_change(self, widget, newValue):
self.lbl.set_text('SpinBox changed, new value: ' + str(newValue))
def on_check_change(self, widget, newValue):
self.lbl.set_text('CheckBox changed, new value: ' + str(newValue))
def open_input_dialog(self, widget):
self.inputDialog = gui.InputDialog('Input Dialog', 'Your name?',
initial_value='type here',
width=500, height=160)
self.inputDialog.confirm_value.do(
self.on_input_dialog_confirm)
# here is returned the Input Dialog widget, and it will be shown
self.inputDialog.show(self)
def on_input_dialog_confirm(self, widget, value):
self.lbl.set_text('Hello ' + value)
def open_fileselection_dialog(self, widget):
self.fileselectionDialog = gui.FileSelectionDialog('File Selection Dialog', 'Select files and folders', False,
'.')
self.fileselectionDialog.confirm_value.do(
self.on_fileselection_dialog_confirm)
# here is returned the Input Dialog widget, and it will be shown
self.fileselectionDialog.show(self)
def on_fileselection_dialog_confirm(self, widget, filelist):
# a list() of filenames and folders is returned
self.lbl.set_text('Selected files: %s' % ','.join(filelist))
if len(filelist):
f = filelist[0]
# replace the last download link
fdownloader = gui.FileDownloader("download selected", f, width=200, height=30)
self.subContainerRight.append(fdownloader, key='file_downloader')
def list_view_on_selected(self, widget, selected_item_key):
""" The selection event of the listView, returns a key of the clicked event.
You can retrieve the item rapidly
"""
self.lbl.set_text('List selection: ' + self.listView.children[selected_item_key].get_text())
def drop_down_changed(self, widget, value):
self.lbl.set_text('New Combo value: ' + value)
def slider_changed(self, widget, value):
self.lbl.set_text('New slider value: ' + str(value))
def color_picker_changed(self, widget, value):
self.lbl.set_text('New color value: ' + value)
def date_changed(self, widget, value):
self.lbl.set_text('New date value: ' + value)
def menu_save_clicked(self, widget):
self.lbl.set_text('Menu clicked: Save')
def menu_saveas_clicked(self, widget):
self.lbl.set_text('Menu clicked: Save As')
def menu_open_clicked(self, widget):
self.lbl.set_text('Menu clicked: Open')
def menu_view_clicked(self, widget):
self.lbl.set_text('Menu clicked: View')
def fileupload_on_success(self, widget, filename):
self.lbl.set_text('File upload success: ' + filename)
def fileupload_on_failed(self, widget, filename):
self.lbl.set_text('File upload failed: ' + filename)
def on_close(self):
""" Overloading App.on_close event to stop the Timer.
"""
self.stop_flag = True
super(MyApp, self).on_close()
if __name__ == "__main__":
# starts the webserver
# optional parameters
# start(MyApp,address='127.0.0.1', port=8081, multiple_instance=False,enable_file_cache=True, update_interval=0.1, start_browser=True)
start(MyApp, debug=True, address='0.0.0.0', port=8081, start_browser=True, multiple_instance=True)
| dddomodossola/gui | examples/widgets_overview_app.py | Python | apache-2.0 | 14,647 |
import os, sys, subprocess, threading, cherrypy, webbrowser, sqlite3
import datetime
from lib.configobj import ConfigObj
from lib.apscheduler.scheduler import Scheduler
from lazylibrarian import logger, postprocess, searchnzb
FULL_PATH = None
PROG_DIR = None
ARGS = None
SIGNAL = None
LOGLEVEL = 1
DAEMON = False
PIDFILE = None
SYS_ENCODING = None
SCHED = Scheduler()
INIT_LOCK = threading.Lock()
__INITIALIZED__ = False
started = False
DATADIR = None
DBFILE = None
CONFIGFILE = None
CFG = None
LOGDIR = None
LOGLIST = []
HTTP_HOST = None
HTTP_PORT = None
HTTP_USER = None
HTTP_PASS = None
HTTP_ROOT = None
HTTP_LOOK = None
LAUNCH_BROWSER = False
SAB_HOST = None
SAB_PORT = None
SAB_USER = None
SAB_PASS = None
SAB_API = None
SAB_CAT = None
DESTINATION_COPY = False
DESTINATION_DIR = None
DOWNLOAD_DIR = None
BLACKHOLE = False
BLACKHOLEDIR = None
USENET_RETENTION = None
IMP_PREFLANG = 'en'
IMP_ONLYISBN = False
GR_API = 'ckvsiSDsuqh7omh74ZZ6Q'
NZBMATRIX = False
NZBMATRIX_USER = None
NZBMATRIX_API = None
NEWZNAB = False
NEWZNAB_HOST = None
NEWZNAB_API = None
NEWZBIN = False
NEWZBIN_UID = None
NEWZBIN_PASS = None
SEARCH_INTERVAL = 360
SCAN_INTERVAL = 10
def CheckSection(sec):
    """ Check if INI section exists, if not create it """
    try:
        CFG[sec]
        return True
    except KeyError:
        CFG[sec] = {}
        return False
#################################################################################
## Check_setting_int #
#################################################################################
#def minimax(val, low, high):
# """ Return value forced within range """
# try:
# val = int(val)
# except:
# val = 0
# if val < low:
# return low
# if val > high:
# return high
# return val
################################################################################
# Check_setting_int #
################################################################################
def check_setting_int(config, cfg_name, item_name, def_val):
    try:
        my_val = int(config[cfg_name][item_name])
    except (KeyError, TypeError, ValueError):
        my_val = def_val
    try:
        config[cfg_name][item_name] = my_val
    except KeyError:
        config[cfg_name] = {}
        config[cfg_name][item_name] = my_val
    logger.debug(item_name + " -> " + str(my_val))
    return my_val
#################################################################################
## Check_setting_float #
#################################################################################
##def check_setting_float(config, cfg_name, item_name, def_val):
## try:
## my_val = float(config[cfg_name][item_name])
## except:
## my_val = def_val
## try:
## config[cfg_name][item_name] = my_val
## except:
## config[cfg_name] = {}
## config[cfg_name][item_name] = my_val
## return my_val
################################################################################
# Check_setting_str #
################################################################################
def check_setting_str(config, cfg_name, item_name, def_val, log=True):
    try:
        my_val = config[cfg_name][item_name]
    except KeyError:
        my_val = def_val
    try:
        config[cfg_name][item_name] = my_val
    except KeyError:
        config[cfg_name] = {}
        config[cfg_name][item_name] = my_val
    if log:
        logger.debug(item_name + " -> " + my_val)
    else:
        logger.debug(item_name + " -> ******")
    return my_val
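# Illustrative behaviour of the helpers above (comments only): given an
# empty ConfigObj,
#   port = check_setting_int(CFG, 'General', 'http_port', 5299)
# returns 5299 and writes it back, so CFG['General']['http_port'] == 5299
# is persisted on the next config_write().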
def initialize():
with INIT_LOCK:
        global __INITIALIZED__, FULL_PATH, PROG_DIR, LOGLEVEL, DAEMON, DATADIR, CONFIGFILE, CFG, LOGDIR, HTTP_HOST, HTTP_PORT, HTTP_USER, HTTP_PASS, HTTP_ROOT, HTTP_LOOK, LAUNCH_BROWSER, CACHEDIR, \
IMP_ONLYISBN, IMP_PREFLANG, SAB_HOST, SAB_PORT, SAB_API, SAB_USER, SAB_PASS, DESTINATION_DIR, DESTINATION_COPY, DOWNLOAD_DIR, SAB_CAT, USENET_RETENTION, BLACKHOLE, BLACKHOLEDIR, GR_API, \
NZBMATRIX, NZBMATRIX_USER, NZBMATRIX_API, NEWZNAB, NEWZNAB_HOST, NEWZNAB_API, NEWZBIN, NEWZBIN_UID, NEWZBIN_PASS
if __INITIALIZED__:
return False
CheckSection('General')
CheckSection('SABnzbd')
try:
HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 5299)
except:
HTTP_PORT = 5299
if HTTP_PORT < 21 or HTTP_PORT > 65535:
HTTP_PORT = 5299
HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
HTTP_USER = check_setting_str(CFG, 'General', 'http_user', '')
HTTP_PASS = check_setting_str(CFG, 'General', 'http_pass', '')
HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '')
HTTP_LOOK = check_setting_str(CFG, 'General', 'http_look', 'default')
LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
LOGDIR = check_setting_str(CFG, 'General', 'logdir', '')
IMP_PREFLANG = check_setting_str(CFG, 'General', 'imp_preflang', IMP_PREFLANG)
IMP_ONLYISBN = bool(check_setting_int(CFG, 'General', 'imp_onlyisbn', 0))
SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
SAB_PORT = check_setting_str(CFG, 'SABnzbd', 'sab_port', '')
SAB_USER = check_setting_str(CFG, 'SABnzbd', 'sab_user', '')
SAB_PASS = check_setting_str(CFG, 'SABnzbd', 'sab_pass', '')
SAB_API = check_setting_str(CFG, 'SABnzbd', 'sab_api', '')
SAB_CAT = check_setting_str(CFG, 'SABnzbd', 'sab_cat', '')
DESTINATION_COPY = bool(check_setting_int(CFG, 'General', 'destination_copy', 0))
DESTINATION_DIR = check_setting_str(CFG, 'General','destination_dir', '')
DOWNLOAD_DIR = check_setting_str(CFG, 'General', 'download_dir', '')
BLACKHOLE = bool(check_setting_int(CFG, 'General', 'blackhole', 0))
BLACKHOLEDIR = check_setting_str(CFG, 'General', 'blackholedir', '')
USENET_RETENTION = check_setting_str(CFG, 'General', 'usenet_retention', '')
NZBMATRIX = bool(check_setting_int(CFG, 'NZBMatrix', 'nzbmatrix', 0))
NZBMATRIX_USER = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_user', '')
NZBMATRIX_API = check_setting_str(CFG, 'NZBMatrix', 'nzbmatrix_api', '')
NEWZNAB = bool(check_setting_int(CFG, 'Newznab', 'newznab', 0))
NEWZNAB_HOST = check_setting_str(CFG, 'Newznab', 'newznab_host', '')
NEWZNAB_API = check_setting_str(CFG, 'Newznab', 'newznab_api', '')
NEWZBIN = bool(check_setting_int(CFG, 'Newzbin', 'newzbin', 0))
NEWZBIN_UID = check_setting_str(CFG, 'Newzbin', 'newzbin_uid', '')
NEWZBIN_PASS = check_setting_str(CFG, 'Newzbin', 'newzbin_pass', '')
if not LOGDIR:
LOGDIR = os.path.join(DATADIR, 'Logs')
# Put the cache dir in the data dir for now
CACHEDIR = os.path.join(DATADIR, 'cache')
if not os.path.exists(CACHEDIR):
try:
os.makedirs(CACHEDIR)
except OSError:
logger.error('Could not create cachedir. Check permissions of: ' + DATADIR)
# Create logdir
if not os.path.exists(LOGDIR):
try:
os.makedirs(LOGDIR)
except OSError:
if LOGLEVEL:
print(LOGDIR + ":")
print(' Unable to create folder for logs. Only logging to console.')
# Start the logger, silence console logging if we need to
logger.lazylibrarian_log.initLogger(loglevel=LOGLEVEL)
# Initialize the database
try:
dbcheck()
except Exception as e:
logger.error("Can't connect to the database: %s" % e)
__INITIALIZED__ = True
return True
def daemonize():
"""
Fork off as a daemon
"""
# Make a non-session-leader child process
try:
pid = os.fork() #@UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError as e:
raise RuntimeError("1st fork failed: %s [%d]" %
(e.strerror, e.errno))
os.setsid() #@UndefinedVariable - only available in UNIX
# Make sure I can read my own files and shut out others
prev = os.umask(0)
os.umask(prev and int('077', 8))
# Make the child a session-leader by detaching from the terminal
try:
pid = os.fork() #@UndefinedVariable - only available in UNIX
if pid != 0:
sys.exit(0)
except OSError as e:
raise RuntimeError("2st fork failed: %s [%d]" %
(e.strerror, e.errno))
    dev_null = open('/dev/null', 'r')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
if PIDFILE:
pid = str(os.getpid())
logger.debug("Writing PID " + pid + " to " + str(PIDFILE))
        open(PIDFILE, 'w').write("%s\n" % pid)
def launch_browser(host, port, root):
if host == '0.0.0.0':
host = 'localhost'
try:
webbrowser.open('http://%s:%i%s' % (host, port, root))
except Exception as e:
logger.error('Could not launch browser: %s' % e)
def config_write():
new_config = ConfigObj()
new_config.filename = CONFIGFILE
new_config['General'] = {}
new_config['General']['http_port'] = HTTP_PORT
new_config['General']['http_host'] = HTTP_HOST
new_config['General']['http_user'] = HTTP_USER
new_config['General']['http_pass'] = HTTP_PASS
new_config['General']['http_root'] = HTTP_ROOT
new_config['General']['http_look'] = HTTP_LOOK
new_config['General']['launch_browser'] = int(LAUNCH_BROWSER)
new_config['General']['logdir'] = LOGDIR
new_config['General']['imp_onlyisbn'] = int(IMP_ONLYISBN)
new_config['General']['imp_preflang'] = IMP_PREFLANG
new_config['SABnzbd'] = {}
new_config['SABnzbd']['sab_host'] = SAB_HOST
new_config['SABnzbd']['sab_port'] = SAB_PORT
new_config['SABnzbd']['sab_user'] = SAB_USER
new_config['SABnzbd']['sab_pass'] = SAB_PASS
new_config['SABnzbd']['sab_api'] = SAB_API
new_config['SABnzbd']['sab_cat'] = SAB_CAT
new_config['General']['destination_dir'] = DESTINATION_DIR
new_config['General']['destination_copy'] = int(DESTINATION_COPY)
new_config['General']['download_dir'] = DOWNLOAD_DIR
new_config['General']['blackhole'] = int(BLACKHOLE)
new_config['General']['blackholedir'] = BLACKHOLEDIR
new_config['General']['usenet_retention'] = USENET_RETENTION
new_config['NZBMatrix'] = {}
new_config['NZBMatrix']['nzbmatrix'] = int(NZBMATRIX)
new_config['NZBMatrix']['nzbmatrix_user'] = NZBMATRIX_USER
new_config['NZBMatrix']['nzbmatrix_api'] = NZBMATRIX_API
new_config['Newznab'] = {}
new_config['Newznab']['newznab'] = int(NEWZNAB)
new_config['Newznab']['newznab_host'] = NEWZNAB_HOST
new_config['Newznab']['newznab_api'] = NEWZNAB_API
new_config['Newzbin'] = {}
new_config['Newzbin']['newzbin'] = int(NEWZBIN)
new_config['Newzbin']['newzbin_uid'] = NEWZBIN_UID
new_config['Newzbin']['newzbin_pass'] = NEWZBIN_PASS
new_config.write()
def dbcheck():
conn=sqlite3.connect(DBFILE)
c=conn.cursor()
c.execute('CREATE TABLE IF NOT EXISTS authors (AuthorID TEXT, AuthorName TEXT UNIQUE, AuthorImg TEXT, AuthorLink TEXT, DateAdded TEXT, Status TEXT, LastBook TEXT, LastLink Text, LastDate TEXT, HaveBooks INTEGER, TotalBooks INTEGER, AuthorBorn TEXT, AuthorDeath TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS books (AuthorID TEXT, AuthorName TEXT, AuthorLink TEXT, BookName TEXT, BookSub TEXT, BookDesc TEXT, BookGenre TEXT, BookIsbn TEXT, BookPub TEXT, BookRate INTEGER, BookImg TEXT, BookPages INTEGER, BookLink TEXT, BookID TEXT UNIQUE, BookDate TEXT, BookLang TEXT, BookAdded TEXT, Status TEXT)')
c.execute('CREATE TABLE IF NOT EXISTS wanted (BookID TEXT, NZBurl TEXT, NZBtitle TEXT, NZBdate TEXT, NZBprov TEXT, Status TEXT)')
try:
logger.info('Checking database')
c.execute('SELECT BookSub from books')
except sqlite3.OperationalError:
logger.info('Updating database to hold book subtitles.')
c.execute('ALTER TABLE books ADD COLUMN BookSub TEXT')
try:
c.execute('SELECT BookPub from books')
except sqlite3.OperationalError:
logger.info('Updating database to hold book publisher')
c.execute('ALTER TABLE books ADD COLUMN BookPub TEXT')
try:
c.execute('SELECT BookGenre from books')
except sqlite3.OperationalError:
logger.info('Updating database to hold bookgenre')
c.execute('ALTER TABLE books ADD COLUMN BookGenre TEXT')
conn.commit()
c.close()
def start():
global __INITIALIZED__, started
if __INITIALIZED__:
# Crons and scheduled jobs go here
starttime = datetime.datetime.now()
SCHED.add_interval_job(postprocess.processDir, minutes=SCAN_INTERVAL, start_date=starttime+datetime.timedelta(minutes=1))
SCHED.add_interval_job(searchnzb.searchbook, minutes=SEARCH_INTERVAL, start_date=starttime+datetime.timedelta(hours=1))
SCHED.start()
# for job in SCHED.get_jobs():
# print job
started = True
def shutdown(restart=False):
config_write()
logger.info('LazyLibrarian is shutting down ...')
cherrypy.engine.exit()
SCHED.shutdown(wait=True)
    if PIDFILE:
logger.info('Removing pidfile %s' % PIDFILE)
os.remove(PIDFILE)
if restart:
logger.info('LazyLibrarian is restarting ...')
popen_list = [sys.executable, FULL_PATH]
popen_list += ARGS
if '--nolaunch' not in popen_list:
popen_list += ['--nolaunch']
logger.info('Restarting LazyLibrarian with ' + str(popen_list))
subprocess.Popen(popen_list, cwd=os.getcwd())
os._exit(0)
| casanovainformationservices/LazyLibrarian | lazylibrarian/__init__.py | Python | gpl-3.0 | 14,102 |
'''
Created on 13-09-2013
@author: kamil
'''
from django.contrib import admin
from officetune.models import Song, Vote
admin.site.register(Vote)
admin.site.register(Song)
| karolmajta/officetune | server/src/officetune/officetune/admin.py | Python | mit | 175 |
import re
class Nth(object):
def __init__(self, repl, n=0):
self.n = n
self.repl = repl
def __call__(self, match):
if self.n == 0:
self.n -= 1
return match.expand(self.repl)
else:
self.n -= 1
return match.group(0)
rex = re.compile(r'a(\w+)')
print rex.sub(Nth(r'A\1', 0), "aardvark abbot abide")
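# First match replaced -> "Aardvark abbot abide"; replacing only the
# second occurrence instead (illustrative):
print rex.sub(Nth(r'A\1', 1), "aardvark abbot abide")  # -> aardvark Abbot abide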
| rik0/rk-exempla | misc/python/match_nth.py | Python | mit | 394 |
import json
import execjs._exceptions as exceptions
from execjs._abstract_runtime import AbstractRuntime
from execjs._abstract_runtime_context import AbstractRuntimeContext
from execjs._misc import encode_unicode_codepoints
try:
import PyV8
except ImportError:
_pyv8_available = False
else:
_pyv8_available = True
class PyV8Runtime(AbstractRuntime):
'''Runtime to execute codes with PyV8.'''
def __init__(self):
pass
@property
def name(self):
return "PyV8"
def _compile(self, source, cwd=None):
return self.Context(source)
def is_available(self):
return _pyv8_available
class Context(AbstractRuntimeContext):
def __init__(self, source=""):
self._source = source
def is_available(self):
return _pyv8_available
def _exec_(self, source):
source = '''\
(function() {{
{0};
{1};
}})()'''.format(
encode_unicode_codepoints(self._source),
encode_unicode_codepoints(source)
)
            # backward compatibility
            source = str(source)
with PyV8.JSContext() as ctxt, PyV8.JSEngine() as engine:
js_errors = (PyV8.JSError, IndexError, ReferenceError, SyntaxError, TypeError)
try:
script = engine.compile(source)
except js_errors as e:
raise exceptions.ProgramError(e)
try:
value = script.run()
except js_errors as e:
raise exceptions.ProgramError(e)
return self.convert(value)
def _eval(self, source):
return self.exec_('return ' + encode_unicode_codepoints(source))
def _call(self, identifier, *args):
args = json.dumps(args)
return self.eval("{identifier}.apply(this, {args})".format(identifier=identifier, args=args))
@classmethod
def convert(cls, obj):
from PyV8 import _PyV8
if isinstance(obj, bytes):
return obj.decode('utf8')
if isinstance(obj, _PyV8.JSArray):
return [cls.convert(v) for v in obj]
elif isinstance(obj, _PyV8.JSFunction):
return None
elif isinstance(obj, _PyV8.JSObject):
ret = {}
for k in obj.keys():
v = cls.convert(obj[k])
if v is not None:
ret[cls.convert(k)] = v
return ret
else:
return obj
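# Illustrative usage sketch (comments only; assumes PyV8 is installed and
# that AbstractRuntimeContext exposes public exec_()/eval()/call() wrappers
# delegating to the private methods above):
#
#   runtime = PyV8Runtime()
#   if runtime.is_available():
#       ctx = runtime.Context("function add(a, b) { return a + b; }")
#       print(ctx.call("add", 1, 2))  # -> 3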
| doloopwhile/PyExecJS | execjs/_pyv8runtime.py | Python | mit | 2,669 |
# Copyright (c) 2003, The Regents of the University of California,
# through Lawrence Berkeley National Laboratory (subject to receipt of
# any required approvals from the U.S. Dept. of Energy). All rights
# reserved.
#
"""Logging"""
ident = "$Id: logging.py 1395 2007-06-14 06:49:35Z boverhof $"
import os, sys
WARN = 1
DEBUG = 2
class ILogger:
'''Logger interface, by default this class
will be used and logging calls are no-ops.
'''
level = 0
def __init__(self, msg):
return
def warning(self, *args, **kw):
return
def debug(self, *args, **kw):
return
def error(self, *args, **kw):
return
def setLevel(cls, level):
cls.level = level
setLevel = classmethod(setLevel)
debugOn = lambda self: self.level >= DEBUG
warnOn = lambda self: self.level >= WARN
class BasicLogger(ILogger):
last = ''
def __init__(self, msg, out=sys.stdout):
self.msg, self.out = msg, out
def warning(self, msg, *args, **kw):
if self.warnOn() is False: return
if BasicLogger.last != self.msg:
BasicLogger.last = self.msg
print >>self, "---- ", self.msg, " ----"
print >>self, " %s " %self.WARN,
print >>self, msg %args
WARN = '[WARN]'
def debug(self, msg, *args, **kw):
if self.debugOn() is False: return
if BasicLogger.last != self.msg:
BasicLogger.last = self.msg
print >>self, "---- ", self.msg, " ----"
print >>self, " %s " %self.DEBUG,
print >>self, msg %args
DEBUG = '[DEBUG]'
def error(self, msg, *args, **kw):
if BasicLogger.last != self.msg:
BasicLogger.last = self.msg
print >>self, "---- ", self.msg, " ----"
print >>self, " %s " %self.ERROR,
print >>self, msg %args
ERROR = '[ERROR]'
def write(self, *args):
'''Write convenience function; writes strings.
'''
for s in args: self.out.write(s)
_LoggerClass = BasicLogger
class GridLogger(ILogger):
def debug(self, msg, *args, **kw):
kw['component'] = self.msg
gridLog(event=msg %args, level='DEBUG', **kw)
def warning(self, msg, *args, **kw):
kw['component'] = self.msg
gridLog(event=msg %args, level='WARNING', **kw)
def error(self, msg, *args, **kw):
kw['component'] = self.msg
gridLog(event=msg %args, level='ERROR', **kw)
#
# Registry of send functions for gridLog
#
GLRegistry = {}
class GLRecord(dict):
"""Grid Logging Best Practices Record, Distributed Logging Utilities
The following names are reserved:
event -- log event name
Below is EBNF for the event name part of a log message.
name = <nodot> ( "." <name> )?
nodot = {RFC3896-chars except "."}
Suffixes:
start: Immediately before the first action in a task.
end: Immediately after the last action in a task (that succeeded).
error: an error condition that does not correspond to an end event.
ts -- timestamp
level -- logging level (see levels below)
status -- integer status code
gid -- global grid identifier
gid, cgid -- parent/child identifiers
prog -- program name
More info: http://www.cedps.net/wiki/index.php/LoggingBestPractices#Python
reserved -- list of reserved names,
omitname -- list of reserved names, output only values ('ts', 'event',)
levels -- dict of levels and description
"""
reserved = ('ts', 'event', 'level', 'status', 'gid', 'prog')
omitname = ()
levels = dict(FATAL='Component cannot continue, or system is unusable.',
ALERT='Action must be taken immediately.',
CRITICAL='Critical conditions (on the system).',
ERROR='Errors in the component; not errors from elsewhere.',
WARNING='Problems that are recovered from, usually.',
NOTICE='Normal but significant condition.',
INFO='Informational messages that would be useful to a deployer or administrator.',
DEBUG='Lower level information concerning program logic decisions, internal state, etc.',
TRACE='Finest granularity, similar to "stepping through" the component or system.',
)
def __init__(self, date=None, **kw):
kw['ts'] = date or self.GLDate()
kw['gid'] = kw.get('gid') or os.getpid()
dict.__init__(self, kw)
def __str__(self):
"""
"""
from cStringIO import StringIO
s = StringIO(); n = " "
reserved = self.reserved; omitname = self.omitname; levels = self.levels
for k in ( list(filter(lambda i: self.has_key(i), reserved)) +
list(filter(lambda i: i not in reserved, self.keys()))
):
v = self[k]
if k in omitname:
s.write( "%s " %self.format[type(v)](v) )
continue
if k == reserved[2] and v not in levels:
pass
s.write( "%s=%s " %(k, self.format[type(v)](v) ) )
s.write("\n")
return s.getvalue()
class GLDate(str):
"""Grid logging Date Format
all timestamps should all be in the same time zone (UTC).
Grid timestamp value format that is a highly readable variant of the ISO8601 time standard [1]:
YYYY-MM-DDTHH:MM:SS.SSSSSSZ
"""
def __new__(self, args=None):
"""args -- datetime (year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
"""
import datetime
args = args or datetime.datetime.utcnow()
l = (args.year, args.month, args.day, args.hour, args.minute, args.second,
args.microsecond, args.tzinfo or 'Z')
return str.__new__(self, "%04d-%02d-%02dT%02d:%02d:%02d.%06d%s" %l)
format = { int:str, float:lambda x: "%lf" % x, long:str, str:lambda x:x,
unicode:str, GLDate:str, }
def gridLog(**kw):
"""Send GLRecord, Distributed Logging Utilities
If the scheme is passed as a keyword parameter
the value is expected to be a callable function
that takes 2 parameters: url, outputStr
GRIDLOG_ON -- turn grid logging on
GRIDLOG_DEST -- provide URL destination
"""
import os
if not bool( int(os.environ.get('GRIDLOG_ON', 0)) ):
return
url = os.environ.get('GRIDLOG_DEST')
if url is None:
return
## NOTE: urlparse problem w/customized schemes
try:
scheme = url[:url.find('://')]
send = GLRegistry[scheme]
send( url, str(GLRecord(**kw)), )
except Exception, ex:
print >>sys.stderr, "*** gridLog failed -- %s" %(str(kw))
def sendUDP(url, outputStr):
from socket import socket, AF_INET, SOCK_DGRAM
idx1 = url.find('://') + 3; idx2 = url.find('/', idx1)
if idx2 < idx1: idx2 = len(url)
netloc = url[idx1:idx2]
host,port = (netloc.split(':')+[80])[0:2]
socket(AF_INET, SOCK_DGRAM).sendto( outputStr, (host,int(port)), )
def writeToFile(url, outputStr):
print >> open(url.split('://')[1], 'a+'), outputStr
GLRegistry["gridlog-udp"] = sendUDP
GLRegistry["file"] = writeToFile
def setBasicLogger():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(0)
def setGridLogger():
'''Use GridLogger for all logging events.
'''
setLoggerClass(GridLogger)
def setBasicLoggerWARN():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(WARN)
def setBasicLoggerDEBUG():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(DEBUG)
def setLoggerClass(loggingClass):
    '''Set Logging Class.
    '''
    assert issubclass(loggingClass, ILogger), 'loggingClass must subclass ILogger'
    global _LoggerClass
    _LoggerClass = loggingClass
def setLevel(level=0):
'''Set Global Logging Level.
'''
ILogger.level = level
def getLevel():
return ILogger.level
def getLogger(msg):
'''Return instance of Logging class.
'''
return _LoggerClass(msg)
| rameshg87/pyremotevbox | pyremotevbox/ZSI/wstools/logging.py | Python | apache-2.0 | 8,235 |
# Generated by Django 2.0.6 on 2018-07-19 15:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('iati', '0009_auto_20180710_1747'),
]
operations = [
migrations.AddField(
model_name='documentlink',
name='result',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='iati.Result'),
),
]
| zimmerman-zimmerman/OIPA | OIPA/iati/migrations/0010_documentlink_result.py | Python | agpl-3.0 | 480 |
#! /usr/bin/env python
from FXPy.fox import *
import os
class ShutterWindow(FXMainWindow):
def __init__(self, app):
FXMainWindow.__init__(self, app, "Shutter Widget Test", w=800, h=600)
foldericon = FXPNGIcon(self.getApp(),
open(os.path.join('icons', 'shutter1.png'), 'rb').read())
compressicon = FXPNGIcon(self.getApp(),
open(os.path.join('icons', 'shutter2.png'), 'rb').read())
contents = FXHorizontalFrame(self, LAYOUT_FILL_X|LAYOUT_FILL_Y)
listFrame = FXVerticalFrame(contents, LAYOUT_FILL_X|LAYOUT_FILL_Y|LAYOUT_TOP|LAYOUT_LEFT,0,0,0,0,10,10,10,10)
FXLabel(listFrame, "Tree List", None, JUSTIFY_CENTER_X|LAYOUT_FILL_X)
FXHorizontalSeparator(listFrame, SEPARATOR_GROOVE|LAYOUT_FILL_X);
tree = FXTreeList(listFrame, 10,
opts=FRAME_SUNKEN|FRAME_THICK|LAYOUT_FILL_X|LAYOUT_FILL_Y|LAYOUT_TOP|LAYOUT_RIGHT|TREELIST_SHOWS_LINES|TREELIST_SHOWS_BOXES)
buttonFrame = FXVerticalFrame(contents,
FRAME_RAISED|LAYOUT_FILL_Y|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,10,10)
FXLabel(buttonFrame,"Button Frame",opts=JUSTIFY_CENTER_X|LAYOUT_FILL_X)
FXHorizontalSeparator(buttonFrame, SEPARATOR_RIDGE|LAYOUT_FILL_X)
shutterFrame = FXShutter(buttonFrame, None, 0,
FRAME_SUNKEN|LAYOUT_FILL_Y|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,0,0,0,0,0,0)
shutterItem = FXShutterItem(shutterFrame, "Test 1", None,
LAYOUT_FILL_X|LAYOUT_FILL_Y|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,10,10,10,10)
FXButton(shutterItem.getContent(), None, foldericon, self.getApp(),
FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, compressicon, self.getApp(),
FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, compressicon, self.getApp(),
FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, foldericon, self.getApp(),
FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
shutterItem = FXShutterItem(shutterFrame, "Test 2", None,
LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,0,0,0,0,10,10,10,10,10,10)
FXButton(shutterItem.getContent(), None, foldericon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, compressicon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, foldericon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
shutterItem = FXShutterItem(shutterFrame, "Test 3", None,
LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,0,0,0,0,10,10,10,10,10,10)
FXButton(shutterItem.getContent(), None, foldericon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, compressicon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
shutterItem = FXShutterItem(shutterFrame, "Test 4", None,
LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,0,0,0,0,10,10,10,10,10,10)
FXButton(shutterItem.getContent(), None, foldericon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
FXButton(shutterItem.getContent(), None, compressicon,
self.getApp(), FXApp.ID_QUIT,
FRAME_THICK|FRAME_RAISED|LAYOUT_FILL_X|LAYOUT_TOP|LAYOUT_LEFT,
0,0,0,0,10,10,5,5)
# Create and show the main window
def create(self):
FXMainWindow.create(self)
self.show(PLACEMENT_SCREEN)
# Main program begins here
if __name__ == '__main__':
import sys
application = FXApp("Shutter", "Test")
application.init(sys.argv)
ShutterWindow(application)
application.create()
application.run()
| lylejohnson/FXPy | examples/shutter.py | Python | lgpl-2.1 | 4,680 |
#!/usr/bin/env python2
# -*- coding:utf-8 -*-
"""老赵的Python代码碎片之一
文件: pycode0x001B-server.py
功能: socket通讯小例子(服务器端)
许可: General Public License
作者: Zhao Xin (赵鑫) <[email protected]>
时间: 2013.08.31
"""
import sys
import socket
HOST = '127.0.0.1'
PORT = 8001
SOCK = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
SOCK.bind((HOST, PORT))
SOCK.listen(1)
while True:
print 'Listening...'
conn, addr = SOCK.accept()
    conn.settimeout(60)  # drop the connection after 60 seconds of inactivity
print 'Connected by', addr
while True:
try:
            data = conn.recv(1024)  # data received from the client
            if data:
                print 'Data received ->', data
                if data == 'bye':  # close this connection on 'bye'
                    conn.send('bye')
                    break
                elif data == 'quit':  # shut down the server on 'quit'
                    conn.send('Good bye!')
                    conn.close()
                    sys.exit(0)
                else:
                    conn.send(data.upper()[::-1])  # send the processed data back to the client
except socket.timeout:
print 'Time out...'
conn.send('Time out!')
break
conn.close()
print 'Connection closed~~'
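# A minimal matching client (illustrative sketch, not part of this file):
#
#   import socket
#   sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#   sock.connect(('127.0.0.1', 8001))
#   sock.send('hello')
#   print sock.recv(1024)  # -> 'OLLEH' (upper-cased and reversed)
#   sock.send('bye')
#   sock.close()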
| archtaurus/laozhao-python-codes | src/pycode0x001B-server.py | Python | gpl-3.0 | 1,319 |
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from config import configure_app
from flask_login import LoginManager
from flask_session import Session
app = Flask(__name__)
configure_app(app)
db = SQLAlchemy(app)
lm = LoginManager(app)
ss = Session(app)
directory = os.path.join(app.config['BASEDIR'], 'files')
if not os.path.exists(directory):
os.mkdir(directory)
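# Example of serving the app with Flask's built-in development server (a
# sketch; the host/port values are assumptions, not taken from config.py):
#
#   if __name__ == '__main__':
#       app.run(host='127.0.0.1', port=5000, debug=True)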
| zadiran/DataworksOnline | main.py | Python | mit | 400 |
#!/usr/bin/env python3
# Event Manager for Uzbl
# Copyright (c) 2009-2010, Mason Larobina <[email protected]>
# Copyright (c) 2009, Dieter Plaetinck <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
EVENT_MANAGER.PY
================
Event manager for uzbl written in python.
'''
import atexit
import configparser
import imp
import logging
import os
import sys
import time
import weakref
import re
import errno
import asyncore
from collections import defaultdict
from functools import partial
from glob import glob
from itertools import count
from optparse import OptionParser
from select import select
from signal import signal, SIGTERM, SIGINT, SIGKILL
from traceback import format_exc
from uzbl.net import Listener, Protocol
from uzbl.core import Uzbl
def xdghome(key, default):
'''Attempts to use the environ XDG_*_HOME paths if they exist otherwise
use $HOME and the default path.'''
xdgkey = "XDG_%s_HOME" % key
if xdgkey in list(os.environ.keys()) and os.environ[xdgkey]:
return os.environ[xdgkey]
return os.path.join(os.environ['HOME'], default)
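# Example (assuming XDG_DATA_HOME is unset and HOME=/home/user):
#   xdghome('DATA', '.local/share/') -> '/home/user/.local/share/'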
# Setup xdg paths.
DATA_DIR = os.path.join(xdghome('DATA', '.local/share/'), 'uzbl/')
CACHE_DIR = os.path.join(xdghome('CACHE', '.cache/'), 'uzbl/')
CONFIG_DIR = os.path.join(xdghome('CONFIG', '.config/'), 'uzbl/')
# Define some globals.
SCRIPTNAME = os.path.basename(sys.argv[0])
logger = logging.getLogger(SCRIPTNAME)
def get_exc():
'''Format `format_exc` for logging.'''
return "\n%s" % format_exc().rstrip()
def expandpath(path):
'''Expand and realpath paths.'''
return os.path.realpath(os.path.expandvars(path))
def daemonize():
'''Daemonize the process using the Stevens' double-fork magic.'''
logger.info('entering daemon mode')
try:
if os.fork():
os._exit(0)
except OSError:
logger.critical('failed to daemonize', exc_info=True)
sys.exit(1)
os.chdir('/')
os.setsid()
os.umask(0)
try:
if os.fork():
os._exit(0)
except OSError:
logger.critical('failed to daemonize', exc_info=True)
sys.exit(1)
if sys.stdout.isatty():
sys.stdout.flush()
sys.stderr.flush()
devnull = '/dev/null'
stdin = open(devnull, 'r')
stdout = open(devnull, 'a+')
stderr = open(devnull, 'a+')
os.dup2(stdin.fileno(), sys.stdin.fileno())
os.dup2(stdout.fileno(), sys.stdout.fileno())
os.dup2(stderr.fileno(), sys.stderr.fileno())
logger.info('entered daemon mode')
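# Why two forks: the first fork lets the parent return to the shell, setsid()
# then detaches the child from its controlling terminal, and the second fork
# guarantees the daemon is not a session leader and so can never reacquire a
# controlling terminal.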
def make_dirs(path):
'''Make all basedirs recursively as required.'''
try:
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
logger.debug('creating directories %r', dirname)
os.makedirs(dirname)
except OSError:
logger.error('failed to create directories', exc_info=True)
class PluginDirectory(object):
def __init__(self):
self.global_plugins = []
self.per_instance_plugins = []
def load(self):
''' Import plugin files '''
import uzbl.plugins
import pkgutil
path = uzbl.plugins.__path__
for impr, name, ispkg in pkgutil.iter_modules(path, 'uzbl.plugins.'):
__import__(name, globals(), locals())
from uzbl.ext import global_registry, per_instance_registry
self.global_plugins.extend(global_registry)
self.per_instance_plugins.extend(per_instance_registry)
class UzblEventDaemon(object):
def __init__(self, listener, plugind, opts, config):
listener.target = self
self.opts = opts
self.listener = listener
self.plugind = plugind
self.config = config
self._plugin_instances = []
self._quit = False
# Hold uzbl instances
# {child socket: Uzbl instance, ..}
self.uzbls = {}
self.plugins = {}
# Register that the event daemon server has started by creating the
# pid file.
make_pid_file(self.opts.pid_file)
# Register a function to clean up the socket and pid file on exit.
atexit.register(self.quit)
# Add signal handlers.
for sigint in [SIGTERM, SIGINT]:
signal(sigint, self.quit)
# Scan plugin directory for plugins
self.plugind.load()
# Initialise global plugins with instances in self.plugins
self.init_plugins()
def init_plugins(self):
'''Initialise event manager plugins.'''
self.plugins = {}
for plugin in self.plugind.global_plugins:
pinst = plugin(self)
self._plugin_instances.append(pinst)
self.plugins[plugin] = pinst
def run(self):
'''Main event daemon loop.'''
logger.debug('entering main loop')
if self.opts.daemon_mode:
# Daemonize the process
daemonize()
# Update the pid file
            make_pid_file(self.opts.pid_file)  # 'opts' is main()'s local; use self.opts here
asyncore.loop()
# Clean up and exit
self.quit()
logger.debug('exiting main loop')
def add_instance(self, sock):
proto = Protocol(sock)
uzbl = Uzbl(self, proto, self.opts)
self.uzbls[sock] = uzbl
for plugin in self.plugins.values():
plugin.new_uzbl(uzbl)
def remove_instance(self, sock):
if sock in self.uzbls:
for plugin in self.plugins.values():
plugin.free_uzbl(self.uzbls[sock])
del self.uzbls[sock]
if not self.uzbls and self.opts.auto_close:
self.quit()
def close_server_socket(self):
'''Close and delete the server socket.'''
try:
self.listener.close()
except:
logger.error('failed to close server socket', exc_info=True)
def get_plugin_config(self, name):
if name not in self.config:
self.config.add_section(name)
return self.config[name]
def quit(self, sigint=None, *args):
'''Close all instance socket objects, server socket and delete the
pid file.'''
if sigint == SIGTERM:
logger.critical('caught SIGTERM, exiting')
elif sigint == SIGINT:
logger.critical('caught SIGINT, exiting')
elif not self._quit:
logger.debug('shutting down event manager')
self.close_server_socket()
for uzbl in list(self.uzbls.values()):
uzbl.close()
if not self._quit:
for plugin in self._plugin_instances:
plugin.cleanup()
del self.plugins # to avoid cyclic links
del self._plugin_instances
del_pid_file(self.opts.pid_file)
if not self._quit:
logger.info('event manager shut down')
self._quit = True
raise SystemExit()
def make_pid_file(pid_file):
'''Creates a pid file at `pid_file`, fails silently.'''
try:
logger.debug('creating pid file %r', pid_file)
make_dirs(pid_file)
pid = os.getpid()
fileobj = open(pid_file, 'w')
fileobj.write('%d' % pid)
fileobj.close()
logger.info('created pid file %r with pid %d', pid_file, pid)
except:
logger.error('failed to create pid file', exc_info=True)
def del_pid_file(pid_file):
'''Deletes a pid file at `pid_file`, fails silently.'''
if os.path.isfile(pid_file):
try:
logger.debug('deleting pid file %r', pid_file)
os.remove(pid_file)
logger.info('deleted pid file %r', pid_file)
except:
logger.error('failed to delete pid file', exc_info=True)
def get_pid(pid_file):
'''Reads a pid from pid file `pid_file`, fails None.'''
try:
logger.debug('reading pid file %r', pid_file)
fileobj = open(pid_file, 'r')
pid = int(fileobj.read())
fileobj.close()
logger.info('read pid %d from pid file %r', pid, pid_file)
return pid
except (IOError, ValueError):
logger.error('failed to read pid', exc_info=True)
return None
def pid_running(pid):
'''Checks if a process with a pid `pid` is running.'''
try:
os.kill(pid, 0)
except OSError:
return False
else:
return True
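# os.kill(pid, 0) delivers no signal; it only performs the existence and
# permission checks, so it works as a cheap liveness probe, e.g.:
#   pid_running(os.getpid())  -> True
# Note it also reports False for live processes owned by other users,
# since EPERM raises OSError here as well.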
def term_process(pid):
'''Asks nicely then forces process with pid `pid` to exit.'''
try:
logger.info('sending SIGTERM to process with pid %r', pid)
os.kill(pid, SIGTERM)
except OSError:
logger.error(get_exc())
logger.debug('waiting for process with pid %r to exit', pid)
start = time.time()
while True:
if not pid_running(pid):
logger.debug('process with pid %d exit', pid)
return True
if (time.time() - start) > 5:
logger.warning('process with pid %d failed to exit', pid)
logger.info('sending SIGKILL to process with pid %d', pid)
try:
os.kill(pid, SIGKILL)
except:
logger.critical('failed to kill %d', pid, exc_info=True)
raise
if (time.time() - start) > 10:
logger.critical('unable to kill process with pid %d', pid)
raise OSError
time.sleep(0.25)
def stop_action(opts, config):
'''Stop the event manager daemon.'''
pid_file = opts.pid_file
if not os.path.isfile(pid_file):
logger.error('could not find running event manager with pid file %r',
pid_file)
return 1
pid = get_pid(pid_file)
if pid is None:
logger.error('unable to determine pid with pid file: %r', pid_file)
return 1
if not pid_running(pid):
logger.debug('no process with pid %r', pid)
del_pid_file(pid_file)
return 1
logger.debug('terminating process with pid %r', pid)
term_process(pid)
del_pid_file(pid_file)
logger.info('stopped event manager process with pid %d', pid)
return 0
def start_action(opts, config):
'''Start the event manager daemon.'''
pid_file = opts.pid_file
if os.path.isfile(pid_file):
pid = get_pid(pid_file)
if pid is None:
logger.error('unable to determine pid with pid file: %r', pid_file)
return 1
if pid_running(pid):
logger.error('event manager already started with pid %d', pid)
return 1
logger.info('no process with pid %d', pid)
del_pid_file(pid_file)
listener = Listener(opts.server_socket)
listener.start()
plugind = PluginDirectory()
daemon = UzblEventDaemon(listener, plugind, opts, config)
daemon.run()
return 0
def restart_action(opts, config):
'''Restart the event manager daemon.'''
stop_action(opts, config)
return start_action(opts, config)
def list_action(opts, config):
'''List all the plugins that would be loaded in the current search
dirs.'''
from types import ModuleType
import uzbl.plugins
import pkgutil
for line in pkgutil.iter_modules(uzbl.plugins.__path__, 'uzbl.plugins.'):
imp, name, ispkg = line
print(name)
return 0
def make_parser():
parser = OptionParser('usage: %prog [options] {start|stop|restart|list}')
add = parser.add_option
add('-v', '--verbose',
dest='verbose', default=2, action='count',
help='increase verbosity')
config_location = os.path.join(CONFIG_DIR, 'event-manager.conf')
add('-c', '--config',
dest='config', metavar='CONFIG', default=config_location,
help='configuration file')
socket_location = os.path.join(CACHE_DIR, 'event_daemon')
add('-s', '--server-socket',
dest='server_socket', metavar="SOCKET", default=socket_location,
help='server AF_UNIX socket location')
add('-p', '--pid-file',
metavar="FILE", dest='pid_file',
help='pid file location, defaults to server socket + .pid')
add('-n', '--no-daemon',
dest='daemon_mode', action='store_false', default=True,
help='do not daemonize the process')
add('-a', '--auto-close',
dest='auto_close', action='store_true', default=False,
help='auto close after all instances disconnect')
add('-o', '--log-file',
dest='log_file', metavar='FILE',
help='write logging output to a file, defaults to server socket +'
' .log')
add('-q', '--quiet-events',
dest='print_events', action="store_false", default=True,
help="silence the printing of events to stdout")
return parser
def init_logger(opts):
log_level = logging.CRITICAL - opts.verbose * 10
logger = logging.getLogger()
logger.setLevel(max(log_level, 10))
# Console
handler = logging.StreamHandler()
handler.setLevel(max(log_level + 10, 10))
handler.setFormatter(logging.Formatter(
'%(name)s: %(levelname)s: %(message)s'))
logger.addHandler(handler)
# Logfile
handler = logging.FileHandler(opts.log_file, 'a+', 'utf-8', 1)
handler.setLevel(max(log_level, 10))
handler.setFormatter(logging.Formatter(
'[%(created)f] %(name)s: %(levelname)s: %(message)s'))
logger.addHandler(handler)
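# Resulting levels for common verbosity counts (log_level = 50 - 10 * count,
# clamped at DEBUG):
#   default (count 2): root/file WARNING(30), console ERROR(40)
#   -v (count 3):      root/file INFO(20),    console WARNING(30)
#   -vvv (count 5):    root/file DEBUG(10),   console DEBUG(10)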
def main():
parser = make_parser()
(opts, args) = parser.parse_args()
opts.server_socket = expandpath(opts.server_socket)
# Set default pid file location
if not opts.pid_file:
opts.pid_file = "%s.pid" % opts.server_socket
else:
opts.pid_file = expandpath(opts.pid_file)
    config = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation())  # an instance, not the class
config.read(opts.config)
# Set default log file location
if not opts.log_file:
opts.log_file = "%s.log" % opts.server_socket
else:
opts.log_file = expandpath(opts.log_file)
# Logging setup
init_logger(opts)
logger.info('logging to %r', opts.log_file)
if opts.auto_close:
logger.debug('will auto close')
else:
logger.debug('will not auto close')
if opts.daemon_mode:
logger.debug('will daemonize')
else:
logger.debug('will not daemonize')
# init like {start|stop|..} daemon actions
daemon_actions = {
'start': start_action,
'stop': stop_action,
'restart': restart_action,
'list': list_action,
}
if len(args) == 1:
action = args[0]
if action not in daemon_actions:
parser.error('invalid action: %r' % action)
elif not args:
action = 'start'
logger.warning('no daemon action given, assuming %r', action)
else:
parser.error('invalid action argument: %r' % args)
logger.info('daemon action %r', action)
# Do action
ret = daemon_actions[action](opts, config)
logger.debug('process CPU time: %f', time.clock())
return ret
if __name__ == "__main__":
sys.exit(main())
| marklee77/uzbl | uzbl/event_manager.py | Python | gpl-3.0 | 15,573 |
# coding: utf-8
import ConfigParser
import os
def main():
code, begin, end = None, None, None
try:
config = ConfigParser.RawConfigParser(allow_no_value=True)
config.read('info.ini')
code = config.get('config', 'code')
begin = config.get('config', 'begin')
end = config.get('config', 'end')
except Exception as e:
        print 'Error while loading config file.', e.message
if code and begin and end:
os.system('scrapy crawl -a where={code} -a begin={begin} -a end={end} -L INFO landdeal'.format(
code=code,
begin=begin,
end=end
))
if __name__ == "__main__":
main()
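# Expected layout of info.ini (a sketch inferred from the reads above; the
# actual values are site-specific):
#
#   [config]
#   code = 1100
#   begin = 2015-1-1
#   end = 2015-12-31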
| sundiontheway/landchina-spider | manage.py | Python | mit | 680 |
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.capabilities.video import CapVideo, BaseVideo
from weboob.tools.backend import Module
from weboob.capabilities.collection import CapCollection, CollectionNotFound
from .browser import YoupornBrowser
from .video import YoupornVideo
__all__ = ['YoupornModule']
class YoupornModule(Module, CapVideo, CapCollection):
NAME = 'youporn'
MAINTAINER = u'Romain Bignon'
EMAIL = '[email protected]'
VERSION = '1.1'
DESCRIPTION = 'YouPorn pornographic video streaming website'
LICENSE = 'AGPLv3+'
BROWSER = YoupornBrowser
def get_video(self, _id):
return self.browser.get_video(_id)
SORTBY = ['relevance', 'rating', 'views', 'time']
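    # SORTBY is indexed by the CapVideo.SEARCH_* constant passed as 'sortby'
    # below; relevance sits at index 0 to match the SEARCH_RELEVANCE default
    # (an assumption about the ordering of those constants).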
def search_videos(self, pattern, sortby=CapVideo.SEARCH_RELEVANCE, nsfw=False):
if not nsfw:
return set()
return self.browser.search_videos(pattern, self.SORTBY[sortby])
def fill_video(self, video, fields):
return self.browser.get_video(video.id)
def iter_resources(self, objs, split_path):
if BaseVideo in objs:
collection = self.get_collection(objs, split_path)
if collection.path_level == 0:
yield self.get_collection(objs, [u'latest_nsfw'])
if collection.split_path == [u'latest_nsfw']:
for video in self.browser.latest_videos():
yield video
def validate_collection(self, objs, collection):
if collection.path_level == 0:
return
if BaseVideo in objs and collection.split_path == [u'latest_nsfw']:
collection.title = u'Latest YouPorn videos (NSFW)'
return
raise CollectionNotFound(collection.split_path)
OBJECTS = {YoupornVideo: fill_video}
| sputnick-dev/weboob | modules/youporn/module.py | Python | agpl-3.0 | 2,482 |
#!/usr/bin/env python
"""
Easy Install
------------
A tool for doing automatic download/extract/build of distutils-based Python
packages. For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.
__ https://pythonhosted.org/setuptools/easy_install.html
"""
from glob import glob
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import DistutilsArgError, DistutilsOptionError, \
DistutilsError, DistutilsPlatformError
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import platform
import textwrap
import warnings
import site
import struct
import contextlib
import subprocess
import shlex
import io
from setuptools import Command
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex
from setuptools.package_index import URL_SCHEME
from setuptools.command import bdist_egg, egg_info
from setuptools.compat import (iteritems, maxsize, basestring, unicode,
reraise, PY2, PY3)
from pkg_resources import (
yield_lines, normalize_path, resource_string, ensure_directory,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
import pkg_resources
# Turn on PEP440Warnings
warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
__all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes',
]
def is_64bit():
return struct.calcsize("P") == 8
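# struct.calcsize("P") is the size of a C pointer for this interpreter:
# 8 bytes on a 64-bit build, 4 on a 32-bit one.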
def samefile(p1, p2):
both_exist = os.path.exists(p1) and os.path.exists(p2)
use_samefile = hasattr(os.path, 'samefile') and both_exist
if use_samefile:
return os.path.samefile(p1, p2)
norm_p1 = os.path.normpath(os.path.normcase(p1))
norm_p2 = os.path.normpath(os.path.normcase(p2))
return norm_p1 == norm_p2
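# Example: the normalized-path fallback lets this work even before the files
# exist, e.g. samefile('/tmp/../tmp/x', '/tmp/x') -> True on POSIX.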
if PY2:
def _to_ascii(s):
return s
def isascii(s):
try:
unicode(s, 'ascii')
return True
except UnicodeError:
return False
else:
def _to_ascii(s):
return s.encode('ascii')
def isascii(s):
try:
s.encode('ascii')
return True
except UnicodeError:
return False
class easy_install(Command):
"""Manage a download/build/install process"""
description = "Find/get/install Python packages"
command_consumes_arguments = True
user_options = [
('prefix=', None, "installation prefix"),
("zip-ok", "z", "install package as a zipfile"),
("multi-version", "m", "make apps have to require() a version"),
("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
("install-dir=", "d", "install package to DIR"),
("script-dir=", "s", "install scripts to DIR"),
("exclude-scripts", "x", "Don't install scripts"),
("always-copy", "a", "Copy all needed packages to install dir"),
("index-url=", "i", "base URL of Python Package Index"),
("find-links=", "f", "additional URL(s) to search for packages"),
("build-directory=", "b",
"download/extract/build in DIR; keep the results"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('record=', None,
"filename in which to record list of installed files"),
('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
('site-dirs=', 'S', "list of directories where .pth files work"),
('editable', 'e', "Install specified packages in editable form"),
('no-deps', 'N', "don't install dependencies"),
('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
('local-snapshots-ok', 'l',
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
"Don't load find-links defined in packages being installed")
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
'no-deps', 'local-snapshots-ok', 'version'
]
if site.ENABLE_USER_SITE:
help_msg = "install in user site-package '%s'" % site.USER_SITE
user_options.append(('user', None, help_msg))
boolean_options.append('user')
negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
# the --user option seems to be an opt-in one,
# so the default should be False.
self.user = 0
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
self.find_links = None
self.build_directory = None
self.args = None
self.optimize = self.record = None
self.upgrade = self.always_copy = self.multi_version = None
self.editable = self.no_deps = self.allow_hosts = None
self.root = self.prefix = self.no_report = None
self.version = None
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
self.install_base = None
self.install_platbase = None
if site.ENABLE_USER_SITE:
self.install_userbase = site.USER_BASE
self.install_usersite = site.USER_SITE
else:
self.install_userbase = None
self.install_usersite = None
self.no_find_links = None
# Options not specifiable via command line
self.package_index = None
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
# section, even if this is a "develop" or "install" command, or some
# other embedding.
self._dry_run = None
self.verbose = self.distribution.verbose
self.distribution._set_command_options(
self, self.distribution.get_option_dict('easy_install')
)
def delete_blockers(self, blockers):
extant_blockers = (
filename for filename in blockers
if os.path.exists(filename) or os.path.islink(filename)
)
list(map(self._delete_path, extant_blockers))
def _delete_path(self, path):
log.info("Deleting %s", path)
if self.dry_run:
return
is_tree = os.path.isdir(path) and not os.path.islink(path)
remover = rmtree if is_tree else os.unlink
remover(path)
@staticmethod
def _render_version():
"""
Render the Setuptools version and installation details, then exit.
"""
ver = sys.version[:3]
dist = get_distribution('setuptools')
tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
print(tmpl.format(**locals()))
raise SystemExit()
def finalize_options(self):
self.version and self._render_version()
py_version = sys.version.split()[0]
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
self.config_vars = {
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
'py_version_short': py_version[0:3],
'py_version_nodot': py_version[0] + py_version[2],
'sys_prefix': prefix,
'prefix': prefix,
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
# Only python 3.2+ has abiflags
'abiflags': getattr(sys, 'abiflags', ''),
}
if site.ENABLE_USER_SITE:
self.config_vars['userbase'] = self.install_userbase
self.config_vars['usersite'] = self.install_usersite
self._fix_install_dir_for_user_site()
self.expand_basedirs()
self.expand_dirs()
self._expand('install_dir', 'script_dir', 'build_directory',
'site_dirs')
# If a non-default installation directory was specified, default the
# script directory to match it.
if self.script_dir is None:
self.script_dir = self.install_dir
if self.no_find_links is None:
self.no_find_links = False
# Let install_dir get set by install_lib command, which in turn
# gets its info from the install command, and takes into account
# --prefix and --home and all that other crud.
self.set_undefined_options(
'install_lib', ('install_dir', 'install_dir')
)
# Likewise, set default script_dir from 'install_scripts.install_dir'
self.set_undefined_options(
'install_scripts', ('install_dir', 'script_dir')
)
if self.user and self.install_purelib:
self.install_dir = self.install_purelib
self.script_dir = self.install_scripts
# default --record from the install command
self.set_undefined_options('install', ('record', 'record'))
# Should this be moved to the if statement below? It's not used
# elsewhere
        normpath = list(map(normalize_path, sys.path))  # a list, so the membership test below can run more than once on Python 3
self.all_site_dirs = get_site_dirs()
if self.site_dirs is not None:
site_dirs = [
os.path.expanduser(s.strip()) for s in
self.site_dirs.split(',')
]
for d in site_dirs:
if not os.path.isdir(d):
log.warn("%s (in --site-dirs) does not exist", d)
elif normalize_path(d) not in normpath:
raise DistutilsOptionError(
d + " (in --site-dirs) is not on sys.path"
)
else:
self.all_site_dirs.append(normalize_path(d))
if not self.editable:
self.check_site_dir()
self.index_url = self.index_url or "https://pypi.python.org/simple"
self.shadow_path = self.all_site_dirs[:]
for path_item in self.install_dir, normalize_path(self.script_dir):
if path_item not in self.shadow_path:
self.shadow_path.insert(0, path_item)
if self.allow_hosts is not None:
hosts = [s.strip() for s in self.allow_hosts.split(',')]
else:
hosts = ['*']
if self.package_index is None:
self.package_index = self.create_index(
self.index_url, search_path=self.shadow_path, hosts=hosts,
)
self.local_index = Environment(self.shadow_path + sys.path)
if self.find_links is not None:
if isinstance(self.find_links, basestring):
self.find_links = self.find_links.split()
else:
self.find_links = []
if self.local_snapshots_ok:
self.package_index.scan_egg_links(self.shadow_path + sys.path)
if not self.no_find_links:
self.package_index.add_find_links(self.find_links)
self.set_undefined_options('install_lib', ('optimize', 'optimize'))
if not isinstance(self.optimize, int):
try:
self.optimize = int(self.optimize)
if not (0 <= self.optimize <= 2):
raise ValueError
except ValueError:
raise DistutilsOptionError("--optimize must be 0, 1, or 2")
if self.editable and not self.build_directory:
raise DistutilsArgError(
"Must specify a build directory (-b) when using --editable"
)
if not self.args:
raise DistutilsArgError(
"No urls, filenames, or requirements specified (see --help)")
self.outputs = []
def _fix_install_dir_for_user_site(self):
"""
Fix the install_dir if "--user" was used.
"""
if not self.user or not site.ENABLE_USER_SITE:
return
self.create_home_path()
if self.install_userbase is None:
msg = "User base directory is not specified"
raise DistutilsPlatformError(msg)
self.install_base = self.install_platbase = self.install_userbase
scheme_name = os.name.replace('posix', 'unix') + '_user'
self.select_scheme(scheme_name)
def _expand_attrs(self, attrs):
for attr in attrs:
val = getattr(self, attr)
if val is not None:
if os.name == 'posix' or os.name == 'nt':
val = os.path.expanduser(val)
val = subst_vars(val, self.config_vars)
setattr(self, attr, val)
def expand_basedirs(self):
"""Calls `os.path.expanduser` on install_base, install_platbase and
root."""
self._expand_attrs(['install_base', 'install_platbase', 'root'])
def expand_dirs(self):
"""Calls `os.path.expanduser` on install dirs."""
self._expand_attrs(['install_purelib', 'install_platlib',
'install_lib', 'install_headers',
'install_scripts', 'install_data', ])
def run(self):
if self.verbose != self.distribution.verbose:
log.set_verbosity(self.verbose)
try:
for spec in self.args:
self.easy_install(spec, not self.no_deps)
if self.record:
outputs = self.outputs
if self.root: # strip any package prefix
root_len = len(self.root)
for counter in range(len(outputs)):
outputs[counter] = outputs[counter][root_len:]
from distutils import file_util
self.execute(
file_util.write_file, (self.record, outputs),
"writing list of installed files to '%s'" %
self.record
)
self.warn_deprecated_options()
finally:
log.set_verbosity(self.distribution.verbose)
def pseudo_tempname(self):
"""Return a pseudo-tempname base in the install directory.
This code is intentionally naive; if a malicious party can write to
the target directory you're already in deep doodoo.
"""
try:
pid = os.getpid()
except:
pid = random.randint(0, maxsize)
return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
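    # e.g. <install_dir>/test-easy-install-12345 -- merely unlikely to
    # collide, not securely unique (see the docstring's caveat).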
def warn_deprecated_options(self):
pass
def check_site_dir(self):
"""Verify that self.install_dir is .pth-capable dir, if needed"""
instdir = normalize_path(self.install_dir)
pth_file = os.path.join(instdir, 'easy-install.pth')
# Is it a configured, PYTHONPATH, implicit, or explicit site dir?
is_site_dir = instdir in self.all_site_dirs
if not is_site_dir and not self.multi_version:
# No? Then directly test whether it does .pth file processing
is_site_dir = self.check_pth_processing()
else:
# make sure we can write to target dir
testfile = self.pseudo_tempname() + '.write-test'
test_exists = os.path.exists(testfile)
try:
if test_exists:
os.unlink(testfile)
open(testfile, 'w').close()
os.unlink(testfile)
except (OSError, IOError):
self.cant_write_to_target()
if not is_site_dir and not self.multi_version:
# Can't install non-multi to non-site dir
raise DistutilsError(self.no_default_version_msg())
if is_site_dir:
if self.pth_file is None:
self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
else:
self.pth_file = None
PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep)
if instdir not in map(normalize_path, filter(None, PYTHONPATH)):
# only PYTHONPATH dirs need a site.py, so pretend it's there
self.sitepy_installed = True
elif self.multi_version and not os.path.exists(pth_file):
self.sitepy_installed = True # don't need site.py in this case
self.pth_file = None # and don't create a .pth file
self.install_dir = instdir
__cant_write_msg = textwrap.dedent("""
can't create or remove files in install directory
The following error occurred while trying to add or remove files in the
installation directory:
%s
The installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
""").lstrip()
__not_exists_id = textwrap.dedent("""
This directory does not currently exist. Please create it and try again, or
choose a different installation directory (using the -d or --install-dir
option).
""").lstrip()
__access_msg = textwrap.dedent("""
Perhaps your account does not have write access to this directory? If the
installation directory is a system-owned directory, you may need to sign in
as the administrator or "root" account. If you do not have administrative
access to this machine, you may wish to choose a different installation
directory, preferably one that is listed in your PYTHONPATH environment
variable.
For information on other options, you may wish to consult the
documentation at:
https://pythonhosted.org/setuptools/easy_install.html
Please make the appropriate changes for your system and try again.
""").lstrip()
def cant_write_to_target(self):
msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
if not os.path.exists(self.install_dir):
msg += '\n' + self.__not_exists_id
else:
msg += '\n' + self.__access_msg
raise DistutilsError(msg)
def check_pth_processing(self):
"""Empirically verify whether .pth files are supported in inst. dir"""
instdir = self.install_dir
log.info("Checking .pth file support in %s", instdir)
pth_file = self.pseudo_tempname() + ".pth"
ok_file = pth_file + '.ok'
ok_exists = os.path.exists(ok_file)
try:
if ok_exists:
os.unlink(ok_file)
dirname = os.path.dirname(ok_file)
if not os.path.exists(dirname):
os.makedirs(dirname)
f = open(pth_file, 'w')
except (OSError, IOError):
self.cant_write_to_target()
else:
try:
f.write("import os; f = open(%r, 'w'); f.write('OK'); "
"f.close()\n" % (ok_file,))
f.close()
f = None
executable = sys.executable
if os.name == 'nt':
dirname, basename = os.path.split(executable)
alt = os.path.join(dirname, 'pythonw.exe')
if (basename.lower() == 'python.exe' and
os.path.exists(alt)):
# use pythonw.exe to avoid opening a console window
executable = alt
from distutils.spawn import spawn
spawn([executable, '-E', '-c', 'pass'], 0)
if os.path.exists(ok_file):
log.info(
"TEST PASSED: %s appears to support .pth files",
instdir
)
return True
finally:
if f:
f.close()
if os.path.exists(ok_file):
os.unlink(ok_file)
if os.path.exists(pth_file):
os.unlink(pth_file)
if not self.multi_version:
log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
return False
def install_egg_scripts(self, dist):
"""Write all the scripts for `dist`, unless scripts are excluded"""
if not self.exclude_scripts and dist.metadata_isdir('scripts'):
for script_name in dist.metadata_listdir('scripts'):
if dist.metadata_isdir('scripts/' + script_name):
# The "script" is a directory, likely a Python 3
# __pycache__ directory, so skip it.
continue
self.install_script(
dist, script_name,
dist.get_metadata('scripts/' + script_name)
)
self.install_wrapper_scripts(dist)
def add_output(self, path):
if os.path.isdir(path):
for base, dirs, files in os.walk(path):
for filename in files:
self.outputs.append(os.path.join(base, filename))
else:
self.outputs.append(path)
def not_editable(self, spec):
if self.editable:
raise DistutilsArgError(
"Invalid argument %r: you can't use filenames or URLs "
"with --editable (except via the --find-links option)."
% (spec,)
)
def check_editable(self, spec):
if not self.editable:
return
if os.path.exists(os.path.join(self.build_directory, spec.key)):
raise DistutilsArgError(
"%r already exists in %s; can't do a checkout there" %
(spec.key, self.build_directory)
)
def easy_install(self, spec, deps=False):
tmpdir = tempfile.mkdtemp(prefix="easy_install-")
download = None
if not self.editable:
self.install_site_py()
try:
if not isinstance(spec, Requirement):
if URL_SCHEME(spec):
# It's a url, download it to tmpdir and process
self.not_editable(spec)
download = self.package_index.download(spec, tmpdir)
return self.install_item(None, download, tmpdir, deps,
True)
elif os.path.exists(spec):
# Existing file or directory, just process it directly
self.not_editable(spec)
return self.install_item(None, spec, tmpdir, deps, True)
else:
spec = parse_requirement_arg(spec)
self.check_editable(spec)
dist = self.package_index.fetch_distribution(
spec, tmpdir, self.upgrade, self.editable,
not self.always_copy, self.local_index
)
if dist is None:
msg = "Could not find suitable distribution for %r" % spec
if self.always_copy:
msg += " (--always-copy skips system and development eggs)"
raise DistutilsError(msg)
elif dist.precedence == DEVELOP_DIST:
# .egg-info dists don't need installing, just process deps
self.process_distribution(spec, dist, deps, "Using")
return dist
else:
return self.install_item(spec, dist.location, tmpdir, deps)
finally:
if os.path.exists(tmpdir):
rmtree(tmpdir)
def install_item(self, spec, download, tmpdir, deps, install_needed=False):
# Installation is also needed if file in tmpdir or is not an egg
install_needed = install_needed or self.always_copy
install_needed = install_needed or os.path.dirname(download) == tmpdir
install_needed = install_needed or not download.endswith('.egg')
install_needed = install_needed or (
self.always_copy_from is not None and
os.path.dirname(normalize_path(download)) ==
normalize_path(self.always_copy_from)
)
if spec and not install_needed:
# at this point, we know it's a local .egg, we just don't know if
# it's already installed.
for dist in self.local_index[spec.project_name]:
if dist.location == download:
break
else:
install_needed = True # it's not in the local index
log.info("Processing %s", os.path.basename(download))
if install_needed:
dists = self.install_eggs(spec, download, tmpdir)
for dist in dists:
self.process_distribution(spec, dist, deps)
else:
dists = [self.egg_distribution(download)]
self.process_distribution(spec, dists[0], deps, "Using")
if spec is not None:
for dist in dists:
if dist in spec:
return dist
def select_scheme(self, name):
"""Sets the install directories by applying the install schemes."""
# it's the caller's problem if they supply a bad name!
scheme = INSTALL_SCHEMES[name]
for key in SCHEME_KEYS:
attrname = 'install_' + key
if getattr(self, attrname) is None:
setattr(self, attrname, scheme[key])
def process_distribution(self, requirement, dist, deps=True, *info):
self.update_pth(dist)
self.package_index.add(dist)
if dist in self.local_index[dist.key]:
self.local_index.remove(dist)
self.local_index.add(dist)
self.install_egg_scripts(dist)
self.installed_projects[dist.key] = dist
log.info(self.installation_report(requirement, dist, *info))
if (dist.has_metadata('dependency_links.txt') and
not self.no_find_links):
self.package_index.add_find_links(
dist.get_metadata_lines('dependency_links.txt')
)
if not deps and not self.always_copy:
return
elif requirement is not None and dist.key != requirement.key:
log.warn("Skipping dependencies for %s", dist)
return # XXX this is not the distribution we were looking for
elif requirement is None or dist not in requirement:
# if we wound up with a different version, resolve what we've got
distreq = dist.as_requirement()
requirement = requirement or distreq
requirement = Requirement(
distreq.project_name, distreq.specs, requirement.extras
)
log.info("Processing dependencies for %s", requirement)
try:
distros = WorkingSet([]).resolve(
[requirement], self.local_index, self.easy_install
)
except DistributionNotFound as e:
raise DistutilsError(str(e))
except VersionConflict as e:
raise DistutilsError(e.report())
if self.always_copy or self.always_copy_from:
# Force all the relevant distros to be copied or activated
for dist in distros:
if dist.key not in self.installed_projects:
self.easy_install(dist.as_requirement())
log.info("Finished processing dependencies for %s", requirement)
def should_unzip(self, dist):
if self.zip_ok is not None:
return not self.zip_ok
if dist.has_metadata('not-zip-safe'):
return True
if not dist.has_metadata('zip-safe'):
return True
return False
def maybe_move(self, spec, dist_filename, setup_base):
dst = os.path.join(self.build_directory, spec.key)
if os.path.exists(dst):
msg = ("%r already exists in %s; build directory %s will not be "
"kept")
log.warn(msg, spec.key, self.build_directory, setup_base)
return setup_base
if os.path.isdir(dist_filename):
setup_base = dist_filename
else:
if os.path.dirname(dist_filename) == setup_base:
os.unlink(dist_filename) # get it out of the tmp dir
contents = os.listdir(setup_base)
if len(contents) == 1:
dist_filename = os.path.join(setup_base, contents[0])
if os.path.isdir(dist_filename):
# if the only thing there is a directory, move it instead
setup_base = dist_filename
ensure_directory(dst)
shutil.move(setup_base, dst)
return dst
def install_wrapper_scripts(self, dist):
if self.exclude_scripts:
return
for args in ScriptWriter.best().get_args(dist):
self.write_script(*args)
def install_script(self, dist, script_name, script_text, dev_path=None):
"""Generate a legacy script wrapper and install it"""
spec = str(dist.as_requirement())
is_script = is_python_script(script_text, script_name)
if is_script:
script_text = (ScriptWriter.get_header(script_text) +
self._load_template(dev_path) % locals())
self.write_script(script_name, _to_ascii(script_text), 'b')
@staticmethod
def _load_template(dev_path):
"""
There are a couple of template scripts in the package. This
function loads one of them and prepares it for use.
"""
# See https://bitbucket.org/pypa/setuptools/issue/134 for info
# on script file naming and downstream issues with SVR4
name = 'script.tmpl'
if dev_path:
name = name.replace('.tmpl', ' (dev).tmpl')
raw_bytes = resource_string('setuptools', name)
return raw_bytes.decode('utf-8')
def write_script(self, script_name, contents, mode="t", blockers=()):
"""Write an executable file to the scripts directory"""
self.delete_blockers( # clean up old .py/.pyw w/o a script
[os.path.join(self.script_dir, x) for x in blockers]
)
log.info("Installing %s script to %s", script_name, self.script_dir)
target = os.path.join(self.script_dir, script_name)
self.add_output(target)
mask = current_umask()
if not self.dry_run:
ensure_directory(target)
if os.path.exists(target):
os.unlink(target)
f = open(target, "w" + mode)
f.write(contents)
f.close()
chmod(target, 0o777 - mask)
def install_eggs(self, spec, dist_filename, tmpdir):
# .egg dirs or files are already built, so just return them
if dist_filename.lower().endswith('.egg'):
return [self.install_egg(dist_filename, tmpdir)]
elif dist_filename.lower().endswith('.exe'):
return [self.install_exe(dist_filename, tmpdir)]
# Anything else, try to extract and build
setup_base = tmpdir
if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
unpack_archive(dist_filename, tmpdir, self.unpack_progress)
elif os.path.isdir(dist_filename):
setup_base = os.path.abspath(dist_filename)
if (setup_base.startswith(tmpdir) # something we downloaded
and self.build_directory and spec is not None):
setup_base = self.maybe_move(spec, dist_filename, setup_base)
# Find the setup.py file
setup_script = os.path.join(setup_base, 'setup.py')
if not os.path.exists(setup_script):
setups = glob(os.path.join(setup_base, '*', 'setup.py'))
if not setups:
raise DistutilsError(
"Couldn't find a setup script in %s" %
os.path.abspath(dist_filename)
)
if len(setups) > 1:
raise DistutilsError(
"Multiple setup scripts in %s" %
os.path.abspath(dist_filename)
)
setup_script = setups[0]
# Now run it, and return the result
if self.editable:
log.info(self.report_editable(spec, setup_script))
return []
else:
return self.build_and_install(setup_script, setup_base)
def egg_distribution(self, egg_path):
if os.path.isdir(egg_path):
metadata = PathMetadata(egg_path, os.path.join(egg_path,
'EGG-INFO'))
else:
metadata = EggMetadata(zipimport.zipimporter(egg_path))
return Distribution.from_filename(egg_path, metadata=metadata)
def install_egg(self, egg_path, tmpdir):
destination = os.path.join(self.install_dir,
os.path.basename(egg_path))
destination = os.path.abspath(destination)
if not self.dry_run:
ensure_directory(destination)
dist = self.egg_distribution(egg_path)
if not samefile(egg_path, destination):
if os.path.isdir(destination) and not os.path.islink(destination):
dir_util.remove_tree(destination, dry_run=self.dry_run)
elif os.path.exists(destination):
self.execute(os.unlink, (destination,), "Removing " +
destination)
try:
new_dist_is_zipped = False
if os.path.isdir(egg_path):
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copytree, "Copying"
elif self.should_unzip(dist):
self.mkpath(destination)
f, m = self.unpack_and_compile, "Extracting"
else:
new_dist_is_zipped = True
if egg_path.startswith(tmpdir):
f, m = shutil.move, "Moving"
else:
f, m = shutil.copy2, "Copying"
self.execute(f, (egg_path, destination),
(m + " %s to %s") %
(os.path.basename(egg_path),
os.path.dirname(destination)))
update_dist_caches(destination,
fix_zipimporter_caches=new_dist_is_zipped)
except:
update_dist_caches(destination, fix_zipimporter_caches=False)
raise
self.add_output(destination)
return self.egg_distribution(destination)
def install_exe(self, dist_filename, tmpdir):
# See if it's valid, get data
cfg = extract_wininst_cfg(dist_filename)
if cfg is None:
raise DistutilsError(
"%s is not a valid distutils Windows .exe" % dist_filename
)
# Create a dummy distribution object until we build the real distro
dist = Distribution(
None,
project_name=cfg.get('metadata', 'name'),
version=cfg.get('metadata', 'version'), platform=get_platform(),
)
# Convert the .exe to an unpacked egg
egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() +
'.egg')
egg_tmp = egg_path + '.tmp'
_egg_info = os.path.join(egg_tmp, 'EGG-INFO')
pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
self.exe_to_egg(dist_filename, egg_tmp)
# Write EGG-INFO/PKG-INFO
if not os.path.exists(pkg_inf):
f = open(pkg_inf, 'w')
f.write('Metadata-Version: 1.0\n')
for k, v in cfg.items('metadata'):
if k != 'target_version':
f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
f.close()
script_dir = os.path.join(_egg_info, 'scripts')
# delete entry-point scripts to avoid duping
self.delete_blockers(
[os.path.join(script_dir, args[0]) for args in
ScriptWriter.get_args(dist)]
)
# Build .egg file from tmpdir
bdist_egg.make_zipfile(
egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
)
# install the .egg
return self.install_egg(egg_path, tmpdir)
def exe_to_egg(self, dist_filename, egg_tmp):
"""Extract a bdist_wininst to the directories an egg would use"""
# Check for .pth file and set up prefix translations
prefixes = get_exe_prefixes(dist_filename)
to_compile = []
native_libs = []
top_level = {}
def process(src, dst):
s = src.lower()
for old, new in prefixes:
if s.startswith(old):
src = new + src[len(old):]
parts = src.split('/')
dst = os.path.join(egg_tmp, *parts)
dl = dst.lower()
if dl.endswith('.pyd') or dl.endswith('.dll'):
parts[-1] = bdist_egg.strip_module(parts[-1])
top_level[os.path.splitext(parts[0])[0]] = 1
native_libs.append(src)
elif dl.endswith('.py') and old != 'SCRIPTS/':
top_level[os.path.splitext(parts[0])[0]] = 1
to_compile.append(dst)
return dst
if not src.endswith('.pth'):
log.warn("WARNING: can't process %s", src)
return None
# extract, tracking .pyd/.dll->native_libs and .py -> to_compile
unpack_archive(dist_filename, egg_tmp, process)
stubs = []
for res in native_libs:
if res.lower().endswith('.pyd'): # create stubs for .pyd's
parts = res.split('/')
resource = parts[-1]
parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
pyfile = os.path.join(egg_tmp, *parts)
to_compile.append(pyfile)
stubs.append(pyfile)
bdist_egg.write_stub(resource, pyfile)
self.byte_compile(to_compile) # compile .py's
bdist_egg.write_safety_flag(
os.path.join(egg_tmp, 'EGG-INFO'),
bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
for name in 'top_level', 'native_libs':
if locals()[name]:
txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
if not os.path.exists(txt):
f = open(txt, 'w')
f.write('\n'.join(locals()[name]) + '\n')
f.close()
__mv_warning = textwrap.dedent("""
Because this distribution was installed --multi-version, before you can
import modules from this package in an application, you will need to
'import pkg_resources' and then use a 'require()' call similar to one of
these examples, in order to select the desired version:
pkg_resources.require("%(name)s") # latest installed version
pkg_resources.require("%(name)s==%(version)s") # this exact version
pkg_resources.require("%(name)s>=%(version)s") # this version or higher
""").lstrip()
__id_warning = textwrap.dedent("""
Note also that the installation directory must be on sys.path at runtime for
this to work. (e.g. by being the application's script directory, by being on
PYTHONPATH, or by being added to sys.path by your code.)
""")
def installation_report(self, req, dist, what="Installed"):
"""Helpful installation message for display to package users"""
msg = "\n%(what)s %(eggloc)s%(extras)s"
if self.multi_version and not self.no_report:
msg += '\n' + self.__mv_warning
if self.install_dir not in map(normalize_path, sys.path):
msg += '\n' + self.__id_warning
eggloc = dist.location
name = dist.project_name
version = dist.version
extras = '' # TODO: self.report_extras(req, dist)
return msg % locals()
__editable_msg = textwrap.dedent("""
Extracted editable version of %(spec)s to %(dirname)s
If it uses setuptools in its setup script, you can activate it in
"development" mode by going to that directory and running::
%(python)s setup.py develop
See the setuptools documentation for the "develop" command for more info.
""").lstrip()
def report_editable(self, spec, setup_script):
dirname = os.path.dirname(setup_script)
python = sys.executable
return '\n' + self.__editable_msg % locals()
def run_setup(self, setup_script, setup_base, args):
sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
sys.modules.setdefault('distutils.command.egg_info', egg_info)
args = list(args)
if self.verbose > 2:
v = 'v' * (self.verbose - 1)
args.insert(0, '-' + v)
elif self.verbose < 2:
args.insert(0, '-q')
if self.dry_run:
args.insert(0, '-n')
log.info(
"Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
)
try:
run_setup(setup_script, args)
except SystemExit as v:
raise DistutilsError("Setup script exited with %s" % (v.args[0],))
def build_and_install(self, setup_script, setup_base):
args = ['bdist_egg', '--dist-dir']
dist_dir = tempfile.mkdtemp(
prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
)
try:
self._set_fetcher_options(os.path.dirname(setup_script))
args.append(dist_dir)
self.run_setup(setup_script, setup_base, args)
all_eggs = Environment([dist_dir])
eggs = []
for key in all_eggs:
for dist in all_eggs[key]:
eggs.append(self.install_egg(dist.location, setup_base))
if not eggs and not self.dry_run:
log.warn("No eggs found in %s (setup script problem?)",
dist_dir)
return eggs
finally:
rmtree(dist_dir)
log.set_verbosity(self.verbose) # restore our log verbosity
def _set_fetcher_options(self, base):
"""
When easy_install is about to run bdist_egg on a source dist, that
source dist might have 'setup_requires' directives, requiring
additional fetching. Ensure the fetcher options given to easy_install
are available to that command as well.
"""
# find the fetch options from easy_install and write them out
# to the setup.cfg file.
ei_opts = self.distribution.get_option_dict('easy_install').copy()
fetch_directives = (
'find_links', 'site_dirs', 'index_url', 'optimize',
'site_dirs', 'allow_hosts',
)
fetch_options = {}
for key, val in ei_opts.items():
if key not in fetch_directives:
continue
fetch_options[key.replace('_', '-')] = val[1]
# create a settings dictionary suitable for `edit_config`
settings = dict(easy_install=fetch_options)
cfg_filename = os.path.join(base, 'setup.cfg')
setopt.edit_config(cfg_filename, settings)
def update_pth(self, dist):
if self.pth_file is None:
return
for d in self.pth_file[dist.key]: # drop old entries
if self.multi_version or d.location != dist.location:
log.info("Removing %s from easy-install.pth file", d)
self.pth_file.remove(d)
if d.location in self.shadow_path:
self.shadow_path.remove(d.location)
if not self.multi_version:
if dist.location in self.pth_file.paths:
log.info(
"%s is already the active version in easy-install.pth",
dist
)
else:
log.info("Adding %s to easy-install.pth file", dist)
self.pth_file.add(dist) # add new entry
if dist.location not in self.shadow_path:
self.shadow_path.append(dist.location)
if not self.dry_run:
self.pth_file.save()
if dist.key == 'setuptools':
# Ensure that setuptools itself never becomes unavailable!
# XXX should this check for latest version?
filename = os.path.join(self.install_dir, 'setuptools.pth')
if os.path.islink(filename):
os.unlink(filename)
f = open(filename, 'wt')
f.write(self.pth_file.make_relative(dist.location) + '\n')
f.close()
def unpack_progress(self, src, dst):
# Progress filter for unpacking
log.debug("Unpacking %s to %s", src, dst)
return dst # only unpack-and-compile skips files for dry run
def unpack_and_compile(self, egg_path, destination):
to_compile = []
to_chmod = []
def pf(src, dst):
if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
to_compile.append(dst)
elif dst.endswith('.dll') or dst.endswith('.so'):
to_chmod.append(dst)
self.unpack_progress(src, dst)
return not self.dry_run and dst or None
unpack_archive(egg_path, destination, pf)
self.byte_compile(to_compile)
if not self.dry_run:
for f in to_chmod:
mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
chmod(f, mode)
def byte_compile(self, to_compile):
if sys.dont_write_bytecode:
self.warn('byte-compiling is disabled, skipping.')
return
from distutils.util import byte_compile
try:
# try to make the byte compile messages quieter
log.set_verbosity(self.verbose - 1)
byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
if self.optimize:
byte_compile(
to_compile, optimize=self.optimize, force=1,
dry_run=self.dry_run
)
finally:
log.set_verbosity(self.verbose) # restore original verbosity
__no_default_msg = textwrap.dedent("""
bad install directory or PYTHONPATH
You are attempting to install a package to a directory that is not
on PYTHONPATH and which Python does not read ".pth" files from. The
installation directory you specified (via --install-dir, --prefix, or
the distutils default setting) was:
%s
and your PYTHONPATH environment variable currently contains:
%r
Here are some of your options for correcting the problem:
* You can choose a different installation directory, i.e., one that is
on PYTHONPATH or supports .pth files
* You can add the installation directory to the PYTHONPATH environment
variable. (It must then also be on PYTHONPATH whenever you run
Python and want to use the package(s) you are installing.)
* You can set up the installation directory to support ".pth" files by
using one of the approaches described here:
https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations
Please make the appropriate changes for your system and try again.""").lstrip()
def no_default_version_msg(self):
template = self.__no_default_msg
return template % (self.install_dir, os.environ.get('PYTHONPATH', ''))
def install_site_py(self):
"""Make sure there's a site.py in the target dir, if needed"""
if self.sitepy_installed:
return # already did it, or don't need to
sitepy = os.path.join(self.install_dir, "site.py")
source = resource_string("setuptools", "site-patch.py")
current = ""
if os.path.exists(sitepy):
log.debug("Checking existing site.py in %s", self.install_dir)
f = open(sitepy, 'rb')
current = f.read()
# we want str, not bytes
if PY3:
current = current.decode()
f.close()
if not current.startswith('def __boot():'):
raise DistutilsError(
"%s is not a setuptools-generated site.py; please"
" remove it." % sitepy
)
if current != source:
log.info("Creating %s", sitepy)
if not self.dry_run:
ensure_directory(sitepy)
f = open(sitepy, 'wb')
f.write(source)
f.close()
self.byte_compile([sitepy])
self.sitepy_installed = True
def create_home_path(self):
"""Create directories under ~."""
if not self.user:
return
home = convert_path(os.path.expanduser("~"))
for name, path in iteritems(self.config_vars):
if path.startswith(home) and not os.path.isdir(path):
self.debug_print("os.makedirs('%s', 0o700)" % path)
os.makedirs(path, 0o700)
INSTALL_SCHEMES = dict(
posix=dict(
install_dir='$base/lib/python$py_version_short/site-packages',
script_dir='$base/bin',
),
)
DEFAULT_SCHEME = dict(
install_dir='$base/Lib/site-packages',
script_dir='$base/Scripts',
)
def _expand(self, *attrs):
config_vars = self.get_finalized_command('install').config_vars
if self.prefix:
# Set default install_dir/scripts from --prefix
config_vars = config_vars.copy()
config_vars['base'] = self.prefix
scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
for attr, val in scheme.items():
if getattr(self, attr, None) is None:
setattr(self, attr, val)
from distutils.util import subst_vars
for attr in attrs:
val = getattr(self, attr)
if val is not None:
val = subst_vars(val, config_vars)
if os.name == 'posix':
val = os.path.expanduser(val)
setattr(self, attr, val)
def get_site_dirs():
# return a list of 'site' dirs
sitedirs = [_f for _f in os.environ.get('PYTHONPATH',
'').split(os.pathsep) if _f]
prefixes = [sys.prefix]
if sys.exec_prefix != sys.prefix:
prefixes.append(sys.exec_prefix)
for prefix in prefixes:
if prefix:
if sys.platform in ('os2emx', 'riscos'):
sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
elif os.sep == '/':
sitedirs.extend([os.path.join(prefix,
"lib",
"python" + sys.version[:3],
"site-packages"),
os.path.join(prefix, "lib", "site-python")])
else:
sitedirs.extend(
[prefix, os.path.join(prefix, "lib", "site-packages")]
)
if sys.platform == 'darwin':
# for framework builds *only* we add the standard Apple
# locations. Currently only per-user, but /Library and
# /Network/Library could be added too
if 'Python.framework' in prefix:
home = os.environ.get('HOME')
if home:
sitedirs.append(
os.path.join(home,
'Library',
'Python',
sys.version[:3],
'site-packages'))
lib_paths = get_path('purelib'), get_path('platlib')
for site_lib in lib_paths:
if site_lib not in sitedirs:
sitedirs.append(site_lib)
if site.ENABLE_USER_SITE:
sitedirs.append(site.USER_SITE)
sitedirs = list(map(normalize_path, sitedirs))
return sitedirs
def expand_paths(inputs):
"""Yield sys.path directories that might contain "old-style" packages"""
seen = {}
for dirname in inputs:
dirname = normalize_path(dirname)
if dirname in seen:
continue
seen[dirname] = 1
if not os.path.isdir(dirname):
continue
files = os.listdir(dirname)
yield dirname, files
for name in files:
if not name.endswith('.pth'):
# We only care about the .pth files
continue
if name in ('easy-install.pth', 'setuptools.pth'):
# Ignore .pth files that we control
continue
# Read the .pth file
f = open(os.path.join(dirname, name))
lines = list(yield_lines(f))
f.close()
# Yield existing non-dupe, non-import directory lines from it
for line in lines:
if not line.startswith("import"):
line = normalize_path(line.rstrip())
if line not in seen:
seen[line] = 1
if not os.path.isdir(line):
continue
yield line, os.listdir(line)
def extract_wininst_cfg(dist_filename):
"""Extract configuration data from a bdist_wininst .exe
Returns a ConfigParser.RawConfigParser, or None
"""
f = open(dist_filename, 'rb')
try:
endrec = zipfile._EndRecData(f)
if endrec is None:
return None
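# Bytes prepended before the zip archive proper: the end-of-central-
# directory record's location minus the central directory's size and
# offset (indices per zipfile._EndRecData).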
prepended = (endrec[9] - endrec[5]) - endrec[6]
if prepended < 12: # no wininst data here
return None
f.seek(prepended - 12)
from setuptools.compat import StringIO, ConfigParser
import struct
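# The last 12 bytes of the prepended data hold three little-endian ints:
# a magic tag, the config data length, and the bitmap length.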
tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
if tag not in (0x1234567A, 0x1234567B):
return None # not a valid tag
f.seek(prepended - (12 + cfglen))
cfg = ConfigParser.RawConfigParser(
{'version': '', 'target_version': ''})
try:
part = f.read(cfglen)
# Read up to the first null byte.
config = part.split(b'\0', 1)[0]
# Now the config is in bytes, but for RawConfigParser, it should
# be text, so decode it.
config = config.decode(sys.getfilesystemencoding())
cfg.readfp(StringIO(config))
except ConfigParser.Error:
return None
if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
return None
return cfg
finally:
f.close()
def get_exe_prefixes(exe_filename):
"""Get exe->egg path translations for a given .exe file"""
prefixes = [
('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''),
('PLATLIB/', ''),
('SCRIPTS/', 'EGG-INFO/scripts/'),
('DATA/lib/site-packages', ''),
]
z = zipfile.ZipFile(exe_filename)
try:
for info in z.infolist():
name = info.filename
parts = name.split('/')
if len(parts) == 3 and parts[2] == 'PKG-INFO':
if parts[1].endswith('.egg-info'):
prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
break
if len(parts) != 2 or not name.endswith('.pth'):
continue
if name.endswith('-nspkg.pth'):
continue
if parts[0].upper() in ('PURELIB', 'PLATLIB'):
contents = z.read(name)
if PY3:
contents = contents.decode()
for pth in yield_lines(contents):
pth = pth.strip().replace('\\', '/')
if not pth.startswith('import'):
prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
finally:
z.close()
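# Lower-case everything and order longest/most specific prefixes first so
# they match before their shorter, more generic counterparts.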
prefixes = [(x.lower(), y) for x, y in prefixes]
prefixes.sort()
prefixes.reverse()
return prefixes
def parse_requirement_arg(spec):
try:
return Requirement.parse(spec)
except ValueError:
raise DistutilsError(
"Not a URL, existing file, or requirement spec: %r" % (spec,)
)
class PthDistributions(Environment):
"""A .pth file with Distribution paths in it"""
dirty = False
def __init__(self, filename, sitedirs=()):
self.filename = filename
self.sitedirs = list(map(normalize_path, sitedirs))
self.basedir = normalize_path(os.path.dirname(self.filename))
self._load()
Environment.__init__(self, [], None, None)
for path in yield_lines(self.paths):
list(map(self.add, find_distributions(path, True)))
def _load(self):
self.paths = []
saw_import = False
seen = dict.fromkeys(self.sitedirs)
if os.path.isfile(self.filename):
f = open(self.filename, 'rt')
for line in f:
if line.startswith('import'):
saw_import = True
continue
path = line.rstrip()
self.paths.append(path)
if not path.strip() or path.strip().startswith('#'):
continue
# skip non-existent paths, in case somebody deleted a package
# manually, and duplicate paths as well
path = self.paths[-1] = normalize_path(
os.path.join(self.basedir, path)
)
if not os.path.exists(path) or path in seen:
self.paths.pop() # skip it
self.dirty = True # we cleaned up, so we're dirty now :)
continue
seen[path] = 1
f.close()
if self.paths and not saw_import:
self.dirty = True # ensure anything we touch has import wrappers
while self.paths and not self.paths[-1].strip():
self.paths.pop()
def save(self):
"""Write changed .pth file back to disk"""
if not self.dirty:
return
rel_paths = list(map(self.make_relative, self.paths))
if rel_paths:
log.debug("Saving %s", self.filename)
lines = self._wrap_lines(rel_paths)
data = '\n'.join(lines) + '\n'
if os.path.islink(self.filename):
os.unlink(self.filename)
with open(self.filename, 'wt') as f:
f.write(data)
elif os.path.exists(self.filename):
log.debug("Deleting empty %s", self.filename)
os.unlink(self.filename)
self.dirty = False
@staticmethod
def _wrap_lines(lines):
return lines
def add(self, dist):
"""Add `dist` to the distribution map"""
new_path = (
dist.location not in self.paths and (
dist.location not in self.sitedirs or
# account for '.' being in PYTHONPATH
dist.location == os.getcwd()
)
)
if new_path:
self.paths.append(dist.location)
self.dirty = True
Environment.add(self, dist)
def remove(self, dist):
"""Remove `dist` from the distribution map"""
while dist.location in self.paths:
self.paths.remove(dist.location)
self.dirty = True
Environment.remove(self, dist)
def make_relative(self, path):
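# Walk upward from `path`, collecting components, until basedir is
# reached; if it never is, the path cannot be made relative and is
# returned unchanged.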
npath, last = os.path.split(normalize_path(path))
baselen = len(self.basedir)
parts = [last]
sep = os.altsep == '/' and '/' or os.sep
while len(npath) >= baselen:
if npath == self.basedir:
parts.append(os.curdir)
parts.reverse()
return sep.join(parts)
npath, last = os.path.split(npath)
parts.append(last)
else:
return path
class RewritePthDistributions(PthDistributions):
@classmethod
def _wrap_lines(cls, lines):
yield cls.prelude
for line in lines:
yield line
yield cls.postlude
_inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ')
prelude = _inline("""
import sys
sys.__plen = len(sys.path)
""")
postlude = _inline("""
import sys
new = sys.path[sys.__plen:]
del sys.path[sys.__plen:]
p = getattr(sys, '__egginsert', 0)
sys.path[p:p] = new
sys.__egginsert = p + len(new)
""")
if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite':
PthDistributions = RewritePthDistributions
def _first_line_re():
"""
Return a regular expression based on first_line_re suitable for matching
strings.
"""
if isinstance(first_line_re.pattern, str):
return first_line_re
# first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
return re.compile(first_line_re.pattern.decode())
def auto_chmod(func, arg, exc):
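# rmtree() onerror handler: on Windows, os.remove fails on read-only
# files, so make the file writable and retry once; any other failure is
# re-raised with the failing call appended for context.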
if func is os.remove and os.name == 'nt':
chmod(arg, stat.S_IWRITE)
return func(arg)
et, ev, _ = sys.exc_info()
reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg))))
def update_dist_caches(dist_path, fix_zipimporter_caches):
"""
Fix any globally cached `dist_path` related data
`dist_path` should be a path of a newly installed egg distribution (zipped
or unzipped).
sys.path_importer_cache contains finder objects that have been cached when
importing data from the original distribution. Any such finders need to be
cleared since the replacement distribution might be packaged differently,
e.g. a zipped egg distribution might get replaced with an unzipped egg
folder or vice versa. Having the old finders cached may then cause Python
to attempt loading modules from the replacement distribution using an
incorrect loader.
zipimport.zipimporter objects are Python loaders charged with importing
data packaged inside zip archives. If stale loaders referencing the
original distribution are left behind, they can fail to load modules from
the replacement distribution. E.g. if an old zipimport.zipimporter instance
is used to load data from a new zipped egg archive, it may cause the
operation to attempt to locate the requested data in the wrong location -
one indicated by the original distribution's zip archive directory
information. Such an operation may then fail outright, e.g. report having
read a 'bad local file header', or even worse, it may fail silently &
return invalid data.
zipimport._zip_directory_cache contains cached zip archive directory
information for all existing zipimport.zipimporter instances and all such
instances connected to the same archive share the same cached directory
information.
If asked, and the underlying Python implementation allows it, we can fix
all existing zipimport.zipimporter instances instead of having to track
them down and remove them one by one, by updating their shared cached zip
archive directory information. This, of course, assumes that the
replacement distribution is packaged as a zipped egg.
If not asked to fix existing zipimport.zipimporter instances, we still do
our best to clear any remaining zipimport.zipimporter related cached data
that might somehow later get used when attempting to load data from the new
distribution and thus cause such load operations to fail. Note that when
tracking down such remaining stale data, we cannot catch every conceivable
usage from here, and we clear only those that we know of and have found to
cause problems if left alive. Any remaining caches should be updated by
whoever is in charge of maintaining them, i.e. they should be ready to
handle us replacing their zip archives with new distributions at runtime.
"""
# There are several other known sources of stale zipimport.zipimporter
# instances that we do not clear here, but might if ever given a reason to
# do so:
# * Global setuptools pkg_resources.working_set (a.k.a. 'master working
# set') may contain distributions which may in turn contain their
# zipimport.zipimporter loaders.
# * Several zipimport.zipimporter loaders held by local variables further
# up the function call stack when running the setuptools installation.
# * Already loaded modules may have their __loader__ attribute set to the
# exact loader instance used when importing them. Python 3.4 docs state
# that this information is intended mostly for introspection and so is
# not expected to cause us problems.
normalized_path = normalize_path(dist_path)
_uncache(normalized_path, sys.path_importer_cache)
if fix_zipimporter_caches:
_replace_zip_directory_cache_data(normalized_path)
else:
# Here, even though we do not want to fix existing and now stale
# zipimporter cache information, we still want to remove it. Related to
# Python's zip archive directory information cache, we clear each of
# its stale entries in two phases:
# 1. Clear the entry so attempting to access zip archive information
# via any existing stale zipimport.zipimporter instances fails.
# 2. Remove the entry from the cache so any newly constructed
# zipimport.zipimporter instances do not end up using old stale
# zip archive directory information.
# This whole stale data removal step does not seem strictly necessary,
# but has been left in because it was done before we started replacing
# the zip archive directory information cache content if possible, and
# there are no relevant unit tests that we can depend on to tell us if
# this is really needed.
_remove_and_clear_zip_directory_cache_data(normalized_path)
def _collect_zipimporter_cache_entries(normalized_path, cache):
"""
Return zipimporter cache entry keys related to a given normalized path.
Alternative path spellings (e.g. those using different character case or
those using alternative path separators) related to the same path are
included. Any sub-path entries are included as well, i.e. those
corresponding to zip archives embedded in other zip archives.
"""
result = []
prefix_len = len(normalized_path)
for p in cache:
np = normalize_path(p)
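# Accept the path itself or true sub-paths only (the next character must
# be a separator or nothing), not unrelated paths sharing a string prefix.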
if (np.startswith(normalized_path) and
np[prefix_len:prefix_len + 1] in (os.sep, '')):
result.append(p)
return result
def _update_zipimporter_cache(normalized_path, cache, updater=None):
"""
Update zipimporter cache data for a given normalized path.
Any sub-path entries are processed as well, i.e. those corresponding to zip
archives embedded in other zip archives.
Given updater is a callable taking a cache entry key and the original entry
(after already removing the entry from the cache), and expected to update
the entry and possibly return a new one to be inserted in its place.
Returning None indicates that the entry should not be replaced with a new
one. If no updater is given, the cache entries are simply removed without
any additional processing, the same as if the updater simply returned None.
"""
for p in _collect_zipimporter_cache_entries(normalized_path, cache):
# N.B. pypy's custom zipimport._zip_directory_cache implementation does
# not support the complete dict interface:
# * Does not support item assignment, thus not allowing this function
# to be used only for removing existing cache entries.
# * Does not support the dict.pop() method, forcing us to use the
# get/del patterns instead. For more detailed information see the
# following links:
# https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960
# https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99
old_entry = cache[p]
del cache[p]
new_entry = updater and updater(p, old_entry)
if new_entry is not None:
cache[p] = new_entry
def _uncache(normalized_path, cache):
_update_zipimporter_cache(normalized_path, cache)
def _remove_and_clear_zip_directory_cache_data(normalized_path):
def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
old_entry.clear()
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=clear_and_remove_cached_zip_archive_directory_data)
# PyPy Python implementation does not allow directly writing to the
# zipimport._zip_directory_cache and so prevents us from attempting to correct
# its content. The best we can do there is clear the problematic cache content
# and have PyPy repopulate it as needed. The downside is that if there are any
# stale zipimport.zipimporter instances lying around, attempting to use them
# will fail due to not having their zip archive directory information available
# instead of being automatically corrected to use the new correct zip archive
# directory information.
if '__pypy__' in sys.builtin_module_names:
_replace_zip_directory_cache_data = \
_remove_and_clear_zip_directory_cache_data
else:
def _replace_zip_directory_cache_data(normalized_path):
def replace_cached_zip_archive_directory_data(path, old_entry):
# N.B. In theory, we could load the zip directory information just
# once for all updated path spellings, and then copy it locally and
# update its contained path strings to contain the correct
# spelling, but that seems like a way too invasive move (this cache
# structure is not officially documented anywhere and could in
# theory change with new Python releases) for no significant
# benefit.
old_entry.clear()
zipimport.zipimporter(path)
old_entry.update(zipimport._zip_directory_cache[path])
return old_entry
_update_zipimporter_cache(
normalized_path, zipimport._zip_directory_cache,
updater=replace_cached_zip_archive_directory_data)
def is_python(text, filename='<string>'):
"Is this string a valid Python script?"
try:
compile(text, filename, 'exec')
except (SyntaxError, TypeError):
return False
else:
return True
def is_sh(executable):
"""Determine if the specified executable is a .sh (contains a #! line)"""
try:
with io.open(executable, encoding='latin-1') as fp:
magic = fp.read(2)
except (OSError, IOError):
return executable
return magic == '#!'
def nt_quote_arg(arg):
"""Quote a command line argument according to Windows parsing rules"""
return subprocess.list2cmdline([arg])
def is_python_script(script_text, filename):
"""Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
"""
if filename.endswith('.py') or filename.endswith('.pyw'):
return True # extension says it's Python
if is_python(script_text, filename):
return True # it's syntactically valid Python
if script_text.startswith('#!'):
# It begins with a '#!' line, so check if 'python' is in it somewhere
return 'python' in script_text.splitlines()[0].lower()
return False # Not any Python I can recognize
try:
from os import chmod as _chmod
except ImportError:
# Jython compatibility
def _chmod(*args):
pass
def chmod(path, mode):
log.debug("changing mode of %s to %o", path, mode)
try:
_chmod(path, mode)
except os.error as e:
log.debug("chmod failed: %s", e)
def fix_jython_executable(executable, options):
warnings.warn("Use JythonCommandSpec", DeprecationWarning, stacklevel=2)
if not JythonCommandSpec.relevant():
return executable
cmd = CommandSpec.best().from_param(executable)
cmd.install_options(options)
return cmd.as_header().lstrip('#!').rstrip('\n')
class CommandSpec(list):
"""
A command spec for a #! header, specified as a list of arguments akin to
those passed to Popen.
"""
options = []
split_args = dict()
@classmethod
def best(cls):
"""
Choose the best CommandSpec class based on environmental conditions.
"""
return cls if not JythonCommandSpec.relevant() else JythonCommandSpec
@classmethod
def _sys_executable(cls):
_default = os.path.normpath(sys.executable)
return os.environ.get('__PYVENV_LAUNCHER__', _default)
@classmethod
def from_param(cls, param):
"""
Construct a CommandSpec from a parameter to build_scripts, which may
be None.
"""
if isinstance(param, cls):
return param
if isinstance(param, list):
return cls(param)
if param is None:
return cls.from_environment()
# otherwise, assume it's a string.
return cls.from_string(param)
@classmethod
def from_environment(cls):
return cls([cls._sys_executable()])
@classmethod
def from_string(cls, string):
"""
Construct a command spec from a simple string representing a command
line parseable by shlex.split.
"""
items = shlex.split(string, **cls.split_args)
return cls(items)
def install_options(self, script_text):
self.options = shlex.split(self._extract_options(script_text))
cmdline = subprocess.list2cmdline(self)
if not isascii(cmdline):
self.options[:0] = ['-x']
@staticmethod
def _extract_options(orig_script):
"""
Extract any options from the first line of the script.
"""
first = (orig_script + '\n').splitlines()[0]
match = _first_line_re().match(first)
options = match.group(1) or '' if match else ''
return options.strip()
def as_header(self):
return self._render(self + list(self.options))
@staticmethod
def _render(items):
cmdline = subprocess.list2cmdline(items)
return '#!' + cmdline + '\n'
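# Example (illustrative):
#     CommandSpec.from_string('/usr/bin/python -O').as_header()
#     # -> '#!/usr/bin/python -O\n'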
# For pbr compat; will be removed in a future version.
sys_executable = CommandSpec._sys_executable()
class WindowsCommandSpec(CommandSpec):
split_args = dict(posix=False)
class JythonCommandSpec(CommandSpec):
@classmethod
def relevant(cls):
return (
sys.platform.startswith('java')
and
__import__('java').lang.System.getProperty('os.name') != 'Linux'
)
def as_header(self):
"""
Work around Jython's sys.executable being a .sh (an invalid
shebang line interpreter).
"""
if not is_sh(self[0]):
return super(JythonCommandSpec, self).as_header()
if self.options:
# Can't apply the workaround, leave it broken
log.warn(
"WARNING: Unable to adapt shebang line for Jython,"
" the following script is NOT executable\n"
" see http://bugs.jython.org/issue1112 for"
" more information.")
return super(JythonCommandSpec, self).as_header()
items = ['/usr/bin/env'] + self + list(self.options)
return self._render(items)
class ScriptWriter(object):
"""
Encapsulates behavior around writing entry point scripts for console and
gui apps.
"""
template = textwrap.dedent("""
# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
__requires__ = %(spec)r
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.exit(
load_entry_point(%(spec)r, %(group)r, %(name)r)()
)
""").lstrip()
command_spec_class = CommandSpec
@classmethod
def get_script_args(cls, dist, executable=None, wininst=False):
# for backward compatibility
warnings.warn("Use get_args", DeprecationWarning)
writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
header = cls.get_script_header("", executable, wininst)
return writer.get_args(dist, header)
@classmethod
def get_script_header(cls, script_text, executable=None, wininst=False):
# for backward compatibility
warnings.warn("Use get_header", DeprecationWarning)
if wininst:
executable = "python.exe"
cmd = cls.command_spec_class.best().from_param(executable)
cmd.install_options(script_text)
return cmd.as_header()
@classmethod
def get_args(cls, dist, header=None):
"""
Yield write_script() argument tuples for a distribution's
console_scripts and gui_scripts entry points.
"""
if header is None:
header = cls.get_header()
spec = str(dist.as_requirement())
for type_ in 'console', 'gui':
group = type_ + '_scripts'
for name, ep in dist.get_entry_map(group).items():
cls._ensure_safe_name(name)
script_text = cls.template % locals()
args = cls._get_script_args(type_, name, header, script_text)
for res in args:
yield res
@staticmethod
def _ensure_safe_name(name):
"""
Prevent paths in *_scripts entry point names.
"""
has_path_sep = re.search(r'[\\/]', name)
if has_path_sep:
raise ValueError("Path separators not allowed in script names")
@classmethod
def get_writer(cls, force_windows):
# for backward compatibility
warnings.warn("Use best", DeprecationWarning)
return WindowsScriptWriter.best() if force_windows else cls.best()
@classmethod
def best(cls):
"""
Select the best ScriptWriter for this environment.
"""
return WindowsScriptWriter.best() if sys.platform == 'win32' else cls
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
# Simply write the stub with no extension.
yield (name, header + script_text)
@classmethod
def get_header(cls, script_text="", executable=None):
"""Create a #! line, getting options (if any) from script_text"""
cmd = cls.command_spec_class.best().from_param(executable)
cmd.install_options(script_text)
return cmd.as_header()
class WindowsScriptWriter(ScriptWriter):
command_spec_class = WindowsCommandSpec
@classmethod
def get_writer(cls):
# for backward compatibility
warnings.warn("Use best", DeprecationWarning)
return cls.best()
@classmethod
def best(cls):
"""
Select the best ScriptWriter suitable for Windows
"""
writer_lookup = dict(
executable=WindowsExecutableLauncherWriter,
natural=cls,
)
# for compatibility, use the executable launcher by default
launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
return writer_lookup[launcher]
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"For Windows, add a .py extension"
ext = dict(console='.pya', gui='.pyw')[type_]
if ext not in os.environ['PATHEXT'].lower().split(';'):
warnings.warn("%s not listed in PATHEXT; scripts will not be "
"recognized as executables." % ext, UserWarning)
old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
old.remove(ext)
header = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield name + ext, header + script_text, 't', blockers
@staticmethod
def _adjust_header(type_, orig_header):
"""
Make sure 'pythonw' is used for gui and 'python' is used for
console (regardless of what sys.executable is).
"""
pattern = 'pythonw.exe'
repl = 'python.exe'
if type_ == 'gui':
pattern, repl = repl, pattern
pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
new_header = pattern_ob.sub(string=orig_header, repl=repl)
clean_header = new_header[2:-1].strip('"')
if sys.platform == 'win32' and not os.path.exists(clean_header):
# the adjusted version doesn't exist, so return the original
return orig_header
return new_header
class WindowsExecutableLauncherWriter(WindowsScriptWriter):
@classmethod
def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw']
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers)
yield (
name + '.exe', get_win_launcher(launcher_type),
'b' # write in binary mode
)
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't')
# for backward-compatibility
get_script_args = ScriptWriter.get_script_args
get_script_header = ScriptWriter.get_script_header
def get_win_launcher(type):
"""
Load the Windows launcher (executable) suitable for launching a script.
`type` should be either 'cli' or 'gui'
Returns the executable as a byte string.
"""
launcher_fn = '%s.exe' % type
if platform.machine().lower() == 'arm':
launcher_fn = launcher_fn.replace(".", "-arm.")
if is_64bit():
launcher_fn = launcher_fn.replace(".", "-64.")
else:
launcher_fn = launcher_fn.replace(".", "-32.")
return resource_string('setuptools', launcher_fn)
def load_launcher_manifest(name):
manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
if PY2:
return manifest % vars()
else:
return manifest.decode('utf-8') % vars()
def rmtree(path, ignore_errors=False, onerror=auto_chmod):
"""Recursively delete a directory tree.
This code is taken from the Python 2.4 version of 'shutil', because
the 2.3 version doesn't really work right.
"""
if ignore_errors:
def onerror(*args):
pass
elif onerror is None:
def onerror(*args):
raise
names = []
try:
names = os.listdir(path)
except os.error:
onerror(os.listdir, path, sys.exc_info())
for name in names:
fullname = os.path.join(path, name)
try:
mode = os.lstat(fullname).st_mode
except os.error:
mode = 0
if stat.S_ISDIR(mode):
rmtree(fullname, ignore_errors, onerror)
else:
try:
os.remove(fullname)
except os.error:
onerror(os.remove, fullname, sys.exc_info())
try:
os.rmdir(path)
except os.error:
onerror(os.rmdir, path, sys.exc_info())
def current_umask():
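# os.umask() sets a new mask and returns the previous one, so set a
# throwaway value, capture the old mask, and immediately restore it.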
tmp = os.umask(0o022)
os.umask(tmp)
return tmp
def bootstrap():
# This function is called when setuptools*.egg is run using /bin/sh
import setuptools
argv0 = os.path.dirname(setuptools.__path__[0])
sys.argv[0] = argv0
sys.argv.append(argv0)
main()
def main(argv=None, **kw):
from setuptools import setup
from setuptools.dist import Distribution
class DistributionWithoutHelpCommands(Distribution):
common_usage = ""
def _show_help(self, *args, **kw):
with _patch_usage():
Distribution._show_help(self, *args, **kw)
if argv is None:
argv = sys.argv[1:]
with _patch_usage():
setup(
script_args=['-q', 'easy_install', '-v'] + argv,
script_name=sys.argv[0] or 'easy_install',
distclass=DistributionWithoutHelpCommands, **kw
)
@contextlib.contextmanager
def _patch_usage():
import distutils.core
USAGE = textwrap.dedent("""
usage: %(script)s [options] requirement_or_url ...
or: %(script)s --help
""").lstrip()
def gen_usage(script_name):
return USAGE % dict(
script=os.path.basename(script_name),
)
saved = distutils.core.gen_usage
distutils.core.gen_usage = gen_usage
try:
yield
finally:
distutils.core.gen_usage = saved
| darkwing/kuma | vendor/packages/setuptools/command/easy_install.py | Python | mpl-2.0 | 87,197 |
#!/usr/bin/env python
import threading
import dns
from dnsdisttests import DNSDistTest
class TestTrailing(DNSDistTest):
# this test suite uses a different responder port
# because, unlike the other ones, its
# responders allow trailing data and we don't want
# to mix things up.
_testServerPort = 5360
_config_template = """
newServer{address="127.0.0.1:%s"}
addAction(AndRule({QTypeRule(dnsdist.AAAA), TrailingDataRule()}), DropAction())
"""
@classmethod
def startResponders(cls):
print("Launching responders..")
cls._UDPResponder = threading.Thread(name='UDP Responder', target=cls.UDPResponder, args=[cls._testServerPort, True])
cls._UDPResponder.setDaemon(True)
cls._UDPResponder.start()
cls._TCPResponder = threading.Thread(name='TCP Responder', target=cls.TCPResponder, args=[cls._testServerPort, True])
cls._TCPResponder.setDaemon(True)
cls._TCPResponder.start()
def testTrailingAllowed(self):
"""
Trailing: Allowed
"""
name = 'allowed.trailing.tests.powerdns.com.'
query = dns.message.make_query(name, 'A', 'IN')
response = dns.message.make_response(query)
rrset = dns.rrset.from_text(name,
3600,
dns.rdataclass.IN,
dns.rdatatype.A,
'127.0.0.1')
response.answer.append(rrset)
raw = query.to_wire()
raw = raw + b'A' * 20
(receivedQuery, receivedResponse) = self.sendUDPQuery(raw, response, rawQuery=True)
self.assertTrue(receivedQuery)
self.assertTrue(receivedResponse)
receivedQuery.id = query.id
self.assertEquals(query, receivedQuery)
self.assertEquals(response, receivedResponse)
(receivedQuery, receivedResponse) = self.sendTCPQuery(raw, response, rawQuery=True)
self.assertTrue(receivedQuery)
self.assertTrue(receivedResponse)
receivedQuery.id = query.id
self.assertEquals(query, receivedQuery)
self.assertEquals(response, receivedResponse)
def testTrailingDropped(self):
"""
Trailing: Dropped
"""
name = 'dropped.trailing.tests.powerdns.com.'
query = dns.message.make_query(name, 'AAAA', 'IN')
raw = query.to_wire()
raw = raw + b'A' * 20
(_, receivedResponse) = self.sendUDPQuery(raw, response=None, rawQuery=True)
self.assertEquals(receivedResponse, None)
(_, receivedResponse) = self.sendTCPQuery(raw, response=None, rawQuery=True)
self.assertEquals(receivedResponse, None)
| grahamhayes/pdns | regression-tests.dnsdist/test_Trailing.py | Python | gpl-2.0 | 2,718 |
"""
This module contains Python triggers for Cobbler.
With Cobbler one is able to add custom actions and commands after many events happening in Cobbler. The Python modules
presented here are examples of what can be done after certain events. Custom triggers may be added in any language as
long as Cobbler is allowed to execute them. Triggers implemented in Python must follow this specification:
- Expose a method called ``register()`` which returns a ``str`` holding the path of the trigger in the filesystem.
- Expose a method called ``run(api, args)`` which returns an ``int``. The integer represents an exit status, as for a
shell script: 0 means success and anything else a failure.
"""
| cobbler/cobbler | cobbler/modules/installation/__init__.py | Python | gpl-2.0 | 717 |