code | repo_name | path | language | license | size
---|---|---|---|---|---|
#
# Copyright John Reid 2008
#
"""
Code to partition Transfac factors into equivalent sets.
"""
import biopsy.transfac as T
import biopsy
import boost.graph as bgl
from cookbook.lru_cache import lru_cache
class Graph(bgl.Graph):
__getstate_manages_dict__ = 1
"""So Boost.python knows we manage the object's dict..."""
def __getstate__(self):
return (
bgl.Graph.__getstate__(self),
self.vertex_id_prop_name,
self.vertex_id_prop_type
)
def __setstate__(self, state):
bgl_state, self.vertex_id_prop_name, self.vertex_id_prop_type = state
bgl.Graph.__setstate__(self, bgl_state)
self.id_2_vertex = dict()
self.vertex_2_id = self.vertex_properties[self.vertex_id_prop_name]
for v in self.vertices:
self.id_2_vertex[self.vertex_2_id[v]] = v
def __init__(self, vertex_id_prop_name='label', vertex_id_prop_type='string'):
"""
Creates a new Graph that has a property map from the given type to the vertices.
@arg vertex_id_prop_name: The name of the property map that maps vertices to ids.
@arg vertex_id_prop_type: The type of the property map that maps vertices to ids. It can be
one of the listed types.
Name C++ type
--------------------
integer int
float float
vertex vertex_descriptor
edge edge_descriptor
string boost::python::str
point2d boost::graph::python::point2d
point3d boost::graph::python::point3d
object boost::python::object
color boost::default_color_type
index int (contains index of each vertex)
"""
bgl.Graph.__init__(self)
self.vertex_id_prop_name = vertex_id_prop_name
"The name of the property map that maps vertices to their ids."
self.vertex_id_prop_type = vertex_id_prop_type
"The type of the property map that maps vertices to their ids (i.e. the type of the ids)."
self.id_2_vertex = dict()
"A dict mapping ids to vertices."
self.vertex_2_id = self.add_vertex_property(vertex_id_prop_name, vertex_id_prop_type)
"A boost.graph property map mapping vertices to ids."
def get_id(self, v):
"""
Return the id for this vertex
"""
return self.vertex_2_id[v]
def get_vertex_by_id(self, id):
"""
Get the vertex with the given id.
"""
if id not in self.id_2_vertex:
raise RuntimeError('Id is not in graph.')
return self.id_2_vertex[id]
def get_or_add_vertex_by_id(self, id):
"""
Get the vertex with the given id or if it is not in graph, then add the vertex.
"""
if id not in self.id_2_vertex:
v = self.add_vertex()
self.id_2_vertex[id] = v
self.vertex_2_id[v] = id
return self.get_vertex_by_id(id)
def remove_vertex_by_id(self, id):
"""
Remove the vertex with the given id from the graph.
"""
        self.remove_vertex(self.get_vertex_by_id(id))
def remove_vertex(self, v):
"""
Remove the vertex from the graph. Call clear_vertex first if v has edges.
"""
del self.id_2_vertex[self.vertex_2_id[v]]
return bgl.Graph.remove_vertex(self, v)
def add_edge_by_id(self, id1, id2):
"""
Add an edge between the vertices with the given ids.
"""
return self.add_edge(self.get_vertex_by_id(id1), self.get_vertex_by_id(id2))
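# Illustrative sketch (assumption, not part of the original module): the
# id-based wrapper API in action. Requires the Boost.Graph Python bindings
# (boost.graph); the two ids are arbitrary example strings.
def _demo_graph_usage():
    g = Graph()
    g.get_or_add_vertex_by_id('factor-A')
    g.get_or_add_vertex_by_id('factor-B')
    g.add_edge_by_id('factor-A', 'factor-B')
    return [g.get_id(v) for v in g.vertices]  # -> ['factor-A', 'factor-B']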
@lru_cache(maxsize=1)
def build_factor_synonyms_graph():
"""
Build a graph that encodes all the factor synonyms in transfac.
"""
from itertools import chain
g = Graph()
for f in T.Factor.all(): # for each factor
for synonym1 in chain([f.name], f.synonyms): # for each synonym
v1 = g.get_or_add_vertex_by_id(synonym1)
for synonym2 in chain([f.name], f.synonyms): # add an edge to each other synonym
if synonym1 != synonym2:
v2 = g.get_or_add_vertex_by_id(synonym2)
if v2 not in g.adjacent_vertices(v1):
g.add_edge(v1, v2)
return g
def remove_small_components(g, num_components, component_map, min_size=2):
    """
    Remove every vertex whose connected component has fewer than min_size
    vertices, and return an array of the component sizes.
    """
    import numpy
    component_sizes = numpy.zeros((num_components,))
for v in g.vertices:
component_sizes[component_map[v]] += 1
for v in g.vertices:
if component_sizes[component_map[v]] < min_size:
g.clear_vertex(v)
g.remove_vertex(v)
return component_sizes
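# For illustration (assumption, not in the original): a typical call pairs
# this with boost.graph's connected_components, mirroring the setup in
# FactorSynonyms.__init__ below --
#   cmap = g.add_vertex_property(name='connected_components', type='integer')
#   n = bgl.connected_components(g, cmap)
#   sizes = remove_small_components(g, n, cmap, min_size=2)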
class FactorSynonyms(object):
"""
Partitions the set of all factor names into equivalence partitions based on synonyms.
Maps from factor names to indexes of the partition.
"""
def __init__(self):
self.g = build_factor_synonyms_graph()
self.component_map = self.g.add_vertex_property(name='connected_components', type='integer')
self.num_components = bgl.connected_components(self.g, self.component_map)
self._build_partition_synonyms()
def _build_partition_synonyms(self):
"""
Calculates one synonym to represent each partition
"""
self.partition_synonyms = [None] * self.num_components
for v in self.g.vertices:
idx = self.component_map[v]
            if self.partition_synonyms[idx] is None:
self.partition_synonyms[idx] = self.g.get_id(v)
def get_partition_idx(self, factor_name):
"""
Get the index of the partition that this factor name is in.
"""
v = self.g.get_vertex_by_id(factor_name)
return self.component_map[v]
def get_partition_synonym(self, partition_idx):
"""
Return the representative synonym for this partition
"""
return self.partition_synonyms[partition_idx]
def get_partition_synonyms(self, partition_idx):
"""
Return the synonyms that make up this partition
"""
return [
self.g.get_id(v)
for v in self.g.vertices
if partition_idx == self.component_map[v]
]
def get_synonym(self, factor_name):
"""
Return the representative synonym of this factor name
"""
return self.get_partition_synonym(self.get_partition_idx(factor_name))
def get_synonyms(self, factor_name):
"""
Return all the synonyms of this factor name
"""
return self.get_partition_synonyms(self.get_partition_idx(factor_name))
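# Illustrative sketch (assumption, not part of the original module): resolving
# a factor name to its representative synonym and its whole partition. Needs a
# local Transfac installation via biopsy; 'NF-kB' is a hypothetical query and
# get_partition_idx raises if the name is unknown.
def _demo_factor_synonyms():
    synonyms = FactorSynonyms()
    print(synonyms.get_synonym('NF-kB'))   # one representative per partition
    print(synonyms.get_synonyms('NF-kB'))  # every name in the same partition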
class Pssm2FactorSynonymMap(dict):
"""
Maps Transfac PSSM accessions to sets of factor synonyms
"""
def __init__(self, factor_synonyms):
self.factor_synonyms = factor_synonyms
for acc in biopsy.get_transfac_pssm_accessions(biopsy.transfac.PssmFilter.all_pssms()):
for factor in biopsy.transfac.TableLink(acc).entry.factors:
self[acc].add(self.factor_synonyms.get_synonym(factor.link.entry.name))
def __missing__(self, k):
self[k] = set()
return self[k]
if __name__ == '__main__':
    factor_synonyms = FactorSynonyms()
| JohnReid/biopsy | Python/biopsy/transfac/factor_synonyms.py | Python | mit | 7,294 |
from time import strftime as now
import threading
import serial
FORMAT = '%Y-%m-%d %H:%M:%S'
ser = serial.Serial('COM4', baudrate=57600)
def log(data, file='sim800.log'):
    with open(file, 'a+') as f:
        f.write(now(FORMAT) + ' ' + str(data) + '\n')
def read_from_port(ser):
while True:
data = ser.readline().rstrip()
print(data)
log(b'<< ' + data)
thread = threading.Thread(target=read_from_port, args=(ser,))
thread.start()
def send(command):
data = bytes(command + "\r", encoding='ascii')
ser.write(data)
log(b'>> ' + data)
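# Illustrative sketch (assumption, not in the original script): sending an SMS
# over the same link. AT+CMGF/AT+CMGS are standard SIM800 commands, but the
# prompt handling here is simplified -- a robust version waits for the '>'
# prompt before writing the message body.
def send_sms(number, text):
    send('AT+CMGF=1')              # switch to SMS text mode
    send('AT+CMGS="%s"' % number)  # start a message; the modem answers with '>'
    ser.write(bytes(text, encoding='ascii') + b'\x1a')  # body + Ctrl+Z terminator
    log(b'>> ' + bytes(text, encoding='ascii'))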
while True:
try:
query = input()
if query.upper().startswith("AT"):
send(query)
elif query.upper() == "SEND TIME":
            # AT+CCLK expects "yy/MM/dd,hh:mm:ss+zz", where zz is the UTC
            # offset in quarter-hours, e.g. +0530 -> +22.
            tz = now("%z")  # e.g. '+0530'
            z = (int(tz[:3]) * 60 + int(tz[0] + tz[3:])) / 15
            time = now("%y/%m/%d,%H:%M:%S") + "{:+03.0f}".format(z)
print("sending time %s" % time)
send('AT+CCLK="%s"' % time)
elif query.upper() == "TIME":
send("AT+CCLK?")
elif query.upper() == "ACTIVATE":
            send('AT+CFUN=1')                      # full functionality mode
            send('AT+SAPBR=3,1,"CONTYPE","GPRS"')  # bearer profile 1: use GPRS
            send('AT+SAPBR=3,1,"APN","telenor"')   # set the access point name
            send('AT+SAPBR=1,1')                   # open the bearer
            send('AT+SAPBR=2,1')                   # query the bearer / IP address
except (KeyboardInterrupt, SystemExit, EOFError):
            break
| wittrup/crap | python/sim800l.py | Python | mit | 1342 |
import pytest
import numpy as np
import murraylab_tools.echo as mt_echo
@pytest.mark.skip(reason="tests not yet implmented")
class TestEchoFunctions():
def test_implement_me(self):
assert 0
| sclamons/murraylab_tools | murraylab_tools/tests/echo_tests/test_misc_echo_functions.py | Python | mit | 204 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
depends_on = (
('core', '0009_drop_timtecuser'),
)
def forwards(self, orm):
        # Rename the core tables to the activities app
db.rename_table('core_activity', 'activities_activity')
db.rename_table('core_answer', 'activities_answer')
if not db.dry_run:
# For permissions to work properly after migrating
orm['contenttypes.contenttype'].objects.filter(app_label='core', model='activity').update(app_label='activities')
orm['contenttypes.contenttype'].objects.filter(app_label='core', model='answer').update(app_label='activities')
def backwards(self, orm):
db.rename_table('activities_activity', 'core_activity')
db.rename_table('activities_answer', 'core_answer')
models = {
u'accounts.timtecuser': {
'Meta': {'object_name': 'TimtecUser'},
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'site': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'activities.activity': {
'Meta': {'ordering': "['-id']", 'object_name': 'Activity'},
'data': ('jsonfield.fields.JSONField', [], {}),
'expected': ('jsonfield.fields.JSONField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'activities.answer': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Answer'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'given': ('jsonfield.fields.JSONField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.TimtecUser']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['activities']
| mupi/tecsaladeaula | activities/migrations/0002_create_activity_and_answer.py | Python | agpl-3.0 | 5,688 |
# -*- coding: utf-8 -*-
from os import environ
#Create = 20120517
YBLOG_VERSION = '20120721' # current version
APP_NAME = environ.get("APP_NAME", "")
debug = not APP_NAME
#MySQL on SAE costs too much, so kvdb replaces MySQL. The approach: change only the models and a few templates, leaving the existing logic untouched where possible.
MYSQL_TO_KVDB_SUPPORT = True
#Migration notes
#1. Back up your data: log in to the SAE console and back up sp_archive, sp_category, sp_links, sp_posts, sp_tags and sp_user.
#   The backup format is yml; if the blog is large, remember to tick "dump all rows" and export compressed.
#2. Set MYSQL_TO_KVDB_SUPPORT to True first.
#3. Run mysql2kv in the tools directory until it completes. Watch for errors; if it fails, the script may need tweaking.
#4. Log in to SAE or the blog admin, clear the cache, and test the blog yourself.
#5. Shut MySQL down completely in the SAE console and delete the MySQL data.
#6. If you hit problems, e-mail me (yyobin#gmail.com) and I will fix the bug as soon as possible.
##The settings below need to be customized
#SITE_TITLE = u"" #blog title
#SITE_TITLE2 = u"" #shown at the top of the sidebar (unused by some themes)
#SITE_SUB_TITLE = u"" #subtitle
#KEYWORDS = u"geek mobile Python SAE GAE cloud-computing Taobao" #blog keywords
#SITE_DECR = u"A geek's blog: writing code, talking shop, writing about life" #blog description, shown to search engines
#ADMIN_NAME = u"abc" #author name used on posts
#NOTICE_MAIL = u"****@gmail.com" #a frequently checked address that receives notification mail (e.g. a QQ mailbox); receiving only
###Outgoing mail settings used for notification mail; must be filled in correctly, Gmail recommended
#MAIL_FROM = '****@gmail.com'
#MAIL_SMTP = 'smtp.gmail.com'
#MAIL_PORT = 587
#MAIL_PASSWORD = ''
#MAIL_FALG = True#True only for Gmail
#Analytics code placed at the bottom of every page
#ANALYTICS_CODE = """"""
#ADSENSE_CODE1 = """"""
#ADSENSE_CODE2 = """"""
#ADSENSE_CODE3 = """"""
#Use the SAE Storage service (stores uploaded attachments); create it in the SAE control panel first
STORAGE_DOMAIN_NAME = 'attachment'
###A conveniently loadable jQuery file
JQUERY = "http://lib.sinaapp.com/js/jquery/1.6.2/jquery.min.js"
COPY_YEAR = '2012' #the © year in the footer
MAJOR_DOMAIN = '%s.sinaapp.com' % APP_NAME #primary domain; defaults to the SAE subdomain
#MAJOR_DOMAIN = 'www.yourdomain.com'
##Blog theme. default/octopress/octopress-disqus exist, but only octopress is currently usable.
##You can also port a WordPress theme you like, or modify default or octopress-disqus.
#See http://saepy.sinaapp.com/t/49 for how to build one.
#Eventually this should be switchable from the blog settings.
if MYSQL_TO_KVDB_SUPPORT:
THEME = ['octopress-kv','admin-kv']
else:
THEME = ['octopress','admin']
#Disqus comment system: if you use it, fill in your website shortname.
#Sign up at http://disqus.com/
DISQUS_WEBSITE_SHORTNAME = ''
####Blogroll. Links are also manageable in the admin backend; the list below remains valid and is sorted first.
LINK_BROLL = [
{"text": 'Sina App Engine', "url": 'http://sae.sina.com.cn/'},
]
#RPC services pinged automatically when a new post is published; for a Chinese blog the three below are about enough
XML_RPC_ENDPOINTS = [
'http://blogsearch.google.com/ping/RPC2',
'http://rpc.pingomatic.com/',
'http://ping.baidu.com/ping/RPC2'
]
##To test locally you must configure the MySQL database info
if debug:
MYSQL_DB = 'app_saepy'
MYSQL_USER = 'root'
MYSQL_PASS = '123'
MYSQL_HOST_M = '127.0.0.1'
MYSQL_HOST_S = '127.0.0.1'
MYSQL_PORT = '3306'
####Besides the settings above, you must also enable the following services in the SAE console:
# 1 Initialize MySQL
# 2 Create a Storage bucket named attachment
# 3 Enable Memcache with an initial size of 1 MB; it is adjustable and can grow as posts and page views increase
# 4 Create a Task Queue named default
# See http://saepy.sinaapp.com/t/50 for the detailed installation guide
############## Changing anything below is not recommended ###########################
if debug:
BASE_URL = 'http://127.0.0.1:8080'
else:
BASE_URL = 'http://%s'%MAJOR_DOMAIN
LANGUAGE = 'zh-CN'
COMMENT_DEFAULT_VISIBLE = 1 #0/1 #whether comments are shown immediately when posted; 0 means they require approval first
GRAVATAR_SUPPORT = 0
COMMENT_EMAIL_REQUIRE = 0 #whether comments require an e-mail address
EACH_PAGE_POST_NUM = 7 #posts shown per page
EACH_PAGE_COMMENT_NUM = 10 #comments per page
RELATIVE_POST_NUM = 5 #number of related posts shown
SHORTEN_CONTENT_WORDS = 150 #characters excerpted in post lists
DESCRIPTION_CUT_WORDS = 100 #characters shown in the meta description
RECENT_COMMENT_NUM = 5 #recent comments shown in the sidebar
ADMIN_RECENT_COMMENT_NUM = 10 #comments shown in the admin interface
RECENT_COMMENT_CUT_WORDS = 20 #characters per comment in the sidebar
LINK_NUM = 10 #blogroll links shown in the sidebar
MAX_COMMENT_NUM_A_DAY = 10 #client-side cookie limit on comments per day
PAGE_CACHE = not debug #no Memcache service locally
COMMON_CACHE_TIME = 3600*24 #general cache time
PAGE_CACHE_TIME = 3600*24 #default page cache time
POST_CACHE_TIME = 3600*24 #default post cache time
HOT_TAGS_NUM = 30 #hot tags shown on the right
MAX_ARCHIVES_NUM = 50 #archive entries shown on the right
MAX_IDLE_TIME = 5 #max DB idle time; SAE docs say 30 but it is actually lower. 5 works; leave it unless problems appear
BLOG_PSW_SUPPORT = True #support password-protected reading
LINK_BROLL_SUPPORT = False #whether the sidebar shows the blogroll
BLOG_BACKUP_SUPPORT = False #whether blog backup is supported
NUM_SHARDS = 0 #number of sharded counters; 0 is enough for low traffic. Scaling from 0 to e.g. 3 may require small code changes
if NUM_SHARDS > 0:
    SHARD_COUNT_SUPPORT = True #whether sharded counters are enabled
else:
SHARD_COUNT_SUPPORT = False
#MOVE_SECRET = '123456' #password for migrating the blog
DETAIL_SETTING_SUPPORT = False #whether the detailed-settings page is enabled
#WeChat token: the string entered in the WeChat console to verify ownership of the site
WX_TOKEN = '123456'
#Maximum number of articles shown via WeChat
WX_MAX_ARTICLE = 5
#Default picture for WeChat articles
WX_DEFAULT_PIC = "http://yobin-attachment.stor.sinaapp.com/zhaoyang1.jpg"
| yobin/saepy-log | setting.py | Python | mit | 5,841 |
import os
import re
import sys
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), "cosmos/VERSION"), "r") as fh:
__version__ = fh.read().strip()
def find_all(path, reg_expr, inverse=False, remove_prefix=False):
if not path.endswith("/"):
path = path + "/"
for root, dirnames, filenames in os.walk(path):
for filename in filenames:
match = re.search(reg_expr, filename) is not None
if inverse:
match = not match
if match:
out = os.path.join(root, filename)
if remove_prefix:
out = out.replace(path, "")
yield out
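# For illustration (assumption, not in the original): the package_data call
# further below walks cosmos/ and yields the relative path of every file whose
# name does not match the loose pattern ".py|.pyc$", e.g. "VERSION" or bundled
# data files:
#   list(find_all("cosmos/", ".py|.pyc$", inverse=True, remove_prefix=True))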
install_requires = [
"funcsigs",
"boto3",
"blinker",
"sqlalchemy",
"networkx>=2.0",
"six",
"drmaa",
"more-itertools",
"decorator",
"python-dateutil",
"flask",
]
package_data = {"cosmos": list(find_all("cosmos/", ".py|.pyc$", inverse=True, remove_prefix=True))}
setup(
name="cosmos-wfm",
version=__version__,
scripts=["bin/cosmos", "bin/run_pyfunc"],
description="Workflow Management System",
long_description="Cosmos is a library for writing analysis pipelines, and is particularly suited pipelines "
"which analyze next generation sequencing genomic"
"data. See https://github.com/Mizzou-CBMI/COSMOS2 for details.",
url="https://mizzou-cbmi.github.io/",
author="Erik Gafni",
author_email="[email protected]",
maintainer="Erik Gafni",
maintainer_email="[email protected]",
license="GPL v3",
install_requires=install_requires,
extras_require={
"test": [
"flask",
"ipython",
"sphinx_rtd_theme",
"black",
"pytest-timeout",
"pytest-xdist",
"ghp-import",
"sphinx",
"sphinx_rtd_theme",
]
},
packages=find_packages(),
include_package_data=True,
package_data=package_data,
# package_dir = {'cosmos': 'cosmos'},
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Utilities",
],
keywords="workflow machine learning ipeline ngs manager management distributed sge "
"slurm genomics sequencing grid computing scientific",
)
| LPM-HMS/COSMOS2 | setup.py | Python | gpl-3.0 | 2,740 |
import ply.lex as lex
states = (
('fa', 'inclusive'),
('pda', 'inclusive'),
('turing', 'inclusive'),
('grammar', 'inclusive'),
('lsystem', 'inclusive'),
('moore', 'inclusive'),
('mealy', 'inclusive'),
('re', 'inclusive')
)
tokens = (
'ID', # id=
'NAME', # name=
'DOUBLEQUOTES', # "
'LBRACKET', # <
'RBRACKET', # >
'SLASHRBRACKET', # />
'VALUE', # ".*"
'LSTRUCTURE', # <structure>
'RSTRUCTURE', # </structure>
'LTYPE', # <type>
'RTYPE', # </type>
'LAUTOMATON', # <automaton>
'RAUTOMATON', # </automaton>
'LSTATE', # <state>
'RSTATE', # </state>
'LX', # <x>
'RX', # </x>
'LY', # <y>
'RY', # </y>
'INITIAL', # <initial/>
'FINAL', # <final/>
'LTRANSITION', # <transition>
'RTRANSITION', # </transition>
'LFROM', # <from>
'RFROM', # </from>
'LTO', # <to>
'RTO', # </to>
'LREAD', # <read>
'RREAD', # </read>
'READ', # <read/>
'LPRODUCTION', # <production>
'RPRODUCTION', # </production>
'LLEFT', # <left>
'RLEFT', # </left>
'LRIGHT', # <right>
'RRIGHT', # </right>
'RIGHT', # <right/>
'LTRANSOUT', # <transout>
'RTRANSOUT', # </transout>
'LOUTPUT', # <output>
'ROUTPUT', # </output>
'LBLOCK', # <block>
'RBLOCK', # </block>
'LCUSTOMBLOCK',
'RCUSTOMBLOCK',
'LTAG', # <tag>
'RTAG', # </tag>
'LWRITE', # <write>
'RWRITE', # </write>
'WRITE', # <write/>
'LMOVE', # <move>
'RMOVE', # </move>
'LTAPES', # <tapes>
'RTAPES', # </tapes>
'LPOP', # <pop>
'RPOP', # </pop>
'POP', # <pop/>
'LPUSH', # <push>
'RPUSH', # </push>
'PUSH', # <push/>
'LEXPRESSION', # <expression>
'REXPRESSION', # </expression>
'LAXIOM', # <axiom>
'RAXIOM', # </axiom>
'LPARAMETER', # <parameter>
'RPARAMETER', # </parameter>
'LNAME', # <name>
'RNAME', # </name>
'LVALUE', # <value>
'RVALUE' # </value>
)
# t_VALUE = r'[0-9]+\.*[0-9]*|[0-9a-zA-Z]+' #FIXME
t_LSTRUCTURE = r'<structure>'
t_RSTRUCTURE = r'</structure>'
t_LTYPE = r'<type>'
t_RTYPE = r'</type>'
t_LBRACKET = r'<'
t_RBRACKET = r'>'
t_SLASHRBRACKET = r'/>'
t_DOUBLEQUOTES = r'\"'
t_fa_pda_turing_mealy_moore_LAUTOMATON = r'<automaton>'
t_fa_pda_turing_mealy_moore_RAUTOMATON = r'</automaton>'
t_fa_pda_turing_mealy_moore_LSTATE = r'<state'
t_fa_pda_turing_mealy_moore_ID = r'id='
t_fa_pda_turing_mealy_moore_NAME = r'name='
t_fa_pda_turing_mealy_moore_RSTATE = r'</state>'
t_fa_pda_turing_mealy_moore_LX = r'<x>'
t_fa_pda_turing_mealy_moore_RX = r'</x>'
t_fa_pda_turing_mealy_moore_LY = r'<y>'
t_fa_pda_turing_mealy_moore_RY = r'</y>'
t_fa_pda_turing_mealy_moore_INITIAL = r'<initial[ ]*/>'
t_fa_pda_turing_mealy_moore_FINAL = r'<final[ ]*/>'
t_fa_pda_turing_mealy_moore_LTRANSITION = r'<transition[^>]*>'
t_fa_pda_turing_mealy_moore_RTRANSITION = r'</transition>'
t_fa_pda_turing_mealy_moore_LFROM = r'<from>'
t_fa_pda_turing_mealy_moore_RFROM = r'</from>'
t_fa_pda_turing_mealy_moore_LTO = r'<to>'
t_fa_pda_turing_mealy_moore_RTO = r'</to>'
t_fa_pda_turing_mealy_moore_LREAD = r'<read[^>]*>'
t_fa_pda_turing_mealy_moore_RREAD = r'</read>'
t_fa_pda_turing_mealy_moore_READ = r'<read[^>]*/>'
t_grammar_lsystem_LPRODUCTION = r'<production>'
t_grammar_lsystem_RPRODUCTION = r'</production>'
t_grammar_lsystem_LLEFT = r'<left>'
t_grammar_lsystem_RLEFT = r'</left>'
t_grammar_lsystem_LRIGHT = r'<right>'
t_grammar_lsystem_RRIGHT = r'</right>'
t_grammar_lsystem_RIGHT = r'<right/>'
t_moore_mealy_LTRANSOUT = r'<transout>'
t_moore_mealy_RTRANSOUT = r'</transout>'
t_moore_LOUTPUT = r'<output>'
t_moore_ROUTPUT = r'</output>'
t_turing_LBLOCK = r'<block[^>]*>'
t_turing_RBLOCK = r'</block>'
t_turing_LCUSTOMBLOCK = r'<.+\.jff[0-9]*>'
t_turing_RCUSTOMBLOCK = r'</.+\.jff[0-9]*>'
t_turing_LTAG = r'<tag>'
t_turing_RTAG = r'</tag>'
t_turing_LWRITE = r'<write[^>]*>'
t_turing_RWRITE = r'</write>'
t_turing_WRITE = r'<write[^>]*/>'
t_turing_LMOVE = r'<move[^>]*>'
t_turing_RMOVE = r'</move>'
t_turing_LTAPES = r'<tapes>'
t_turing_RTAPES = r'</tapes>'
t_pda_LPOP = r'<pop>'
t_pda_RPOP = r'</pop>'
t_pda_POP = r'<pop/>'
t_pda_LPUSH = r'<push>'
t_pda_RPUSH = r'</push>'
t_pda_PUSH = r'<push/>'
t_re_LEXPRESSION = r'<expression>'
t_re_REXPRESSION = r'</expression>'
t_lsystem_LAXIOM = r'<axiom>'
t_lsystem_RAXIOM = r'</axiom>'
t_lsystem_LPARAMETER = r'<parameter>'
t_lsystem_RPARAMETER = r'</parameter>'
t_lsystem_LNAME = r'<name>'
t_lsystem_RNAME = r'</name>'
t_lsystem_LVALUE = r'<value>'
t_lsystem_RVALUE = r'</value>'
t_ignore = ' \t\r\n'
t_ignore_comment = r'<!--.*-->'
t_ignore_version = r'<\?xml.*\?>'
t_ignore_htmlcode = r'\&\#13;' # FIXME
def t_VALUE(t):
r'(?<=>).+(?=</)|[A-Za-z0-9]+'
for state, _ in states:
if t.value == state:
t.lexer.begin(state)
return t
# Error handling rule
def t_ANY_error(t):
print("Illegal character '%s'" % t.value[1])
t.lexer.skip(1)
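# Illustrative sketch (assumption, not part of the original module): build the
# lexer from the t_* rules above and tokenize a minimal JFLAP-style fragment.
if __name__ == '__main__':
    lexer = lex.lex()
    lexer.input('<structure>\n<type>fa</type>\n</structure>')
    for token in lexer:
        print('%s %s' % (token.type, token.value))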
| leonardolima/jflap-check | lexer.py | Python | mit | 7,440 |
import libtcodpy as libtcod
from vec2d import Vec2d
from model.action import Action, ActionTag
from model.attribute import AttributeTag
from behavior import Behavior
class AIRandomWalkBehavior(Behavior):
def __init__(self, manager):
Behavior.__init__(self, manager)
def generate_actions(self):
events = []
for id,entity in filter(lambda ent: ent[1].get_attribute(AttributeTag.HostileProgram), self.manager.entities.iteritems()):
#TODO: pull an RNG out into entity manager so I can properly save and control rng generation for the purposes of being a roguelike
new_position = Vec2d(libtcod.random_get_int(0, -1, 1), libtcod.random_get_int(0, -1, 1))
#mildly biases horizontal movement
if new_position[0] != 0:
new_position[1] = 0
events.append(Action(ActionTag.ProgramMovement, {'target_id':entity.id, 'value':new_position}))
        return events
| RCIX/RogueP | model/behaviors/behavior_ai_randomwalk.py | Python | mit | 873 |
class Solution():
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
dic = {}
for i in range(len(nums)):
sub = target - nums[i]
if sub in dic:
return [i, dic[sub]]
else:
                dic[nums[i]] = i
| YiqunPeng/Leetcode-pyq | solutions/1TwoSum.py | Python | gpl-3.0 | 377 |
#!/usr/bin/python3
"""Build macro-generation headers for to wrap the type-generic RB3 API.
You could say this is a program that writes a program that writes a program...
"""
import sys
import subprocess
def cs(xs):
return ', '.join(xs)
def cleanescapes(lines):
return [line.rstrip(' \\\n') + ' \\\n' for line in lines]
def filetomacro(name):
return ''.join(cleanescapes(open('templates/' + name + '.c').readlines()))
params0 = 'OUTER_TREE_TYPE NODE_TYPE GET_HEAD GET_NODE'.split()
args0 = ['struct BASENAME', 'NODE_TYPE', 'GET_HEAD', 'GET_NODE']
params1 = 'INNER_TREE OUTER_TREE'.split()
args1 = 'BASENAME##_inner_tree BASENAME##_outer_tree'.split()
params2 = 'reset_tree isempty get_min get_max get_prev get_next get_minmax get_prevnext get_root has_child get_child get_parent get_prev_ancestor get_next_ancestor get_prev_descendant get_next_descendant link_and_rebalance unlink_and_rebalance get_parent_dir get_base get_containing_tree'.split()
args2 = ['BASENAME##_{}'.format(name) for name in params2]
params3 = 'nodecmp find_in_subtree find_parent_in_subtree delete_in_subtree insert_in_subtree find delete insert'.split()
args3 = ['BASENAME##_{}##SUFFIX'.format(name) for name in params3]
proxies = """
/*
* This is the stuff the user wants to use
*/
#define RB3_GEN_IMPL_HEADER() \\
_RB3_GEN_IMPL_HEADER_REAL(_RB3_API_EXTERNIMPL)
#define RB3_GEN_IMPL_HEADER_STATIC() \\
_RB3_GEN_IMPL_HEADER_REAL(_RB3_API_STATICIMPL)
#define RB3_GEN_IMPL() \\
_RB3_GEN_IMPL_REAL(_RB3_API_EXTERNIMPL)
#define RB3_GEN_IMPL_STATIC() \\
_RB3_GEN_IMPL_REAL(_RB3_API_STATICIMPL)
#define RB3_GEN_INLINE_PROTO(BASENAME, NODE_TYPE, GET_HEAD, GET_NODE) \\
_RB3_GEN_INLINE_PROTO_REAL(_RB3_API_EXTERNIMPL, {args0}, {args1}, {args2})
#define RB3_GEN_INLINE_PROTO_STATIC(BASENAME, NODE_TYPE, GET_HEAD, GET_NODE) \\
_RB3_GEN_INLINE_PROTO_REAL(_RB3_API_STATICIMPL, {args0}, {args1}, {args2})
#define RB3_GEN_NODECMP(BASENAME, SUFFIX, NODE_TYPE, GET_HEAD, GET_NODE, COMPARE_NODE) \\
_RB3_GEN_NODECMP_REAL(_RB3_API_EXTERNIMPL, {args0}, COMPARE_NODE, {args1}, {args2}, {args3})
#define RB3_GEN_NODECMP_STATIC(BASENAME, SUFFIX, NODE_TYPE, GET_HEAD, GET_NODE, COMPARE_NODE) \\
_RB3_GEN_NODECMP_REAL(_RB3_API_STATICIMPL, {args0}, COMPARE_NODE, {args1}, {args2}, {args3})
#define RB3_GEN_NODECMP_PROTO(BASENAME, SUFFIX, NODE_TYPE, GET_HEAD, GET_NODE, COMPARE_NODE) \\
_RB3_GEN_NODECMP_PROTO_REAL(_RB3_API_EXTERNIMPL, {args0}, COMPARE_NODE, {args1}, {args2}, {args3})
#define RB3_GEN_NODECMP_PROTO_STATIC(BASENAME, SUFFIX, NODE_TYPE, GET_HEAD, GET_NODE, COMPARE_NODE) \\
_RB3_GEN_NODECMP_PROTO_REAL(_RB3_API_STATICIMPL, {args0}, COMPARE_NODE, {args1}, {args2}, {args3})
/* (END stuff) */
""".format(args0=cs(args0), args1=cs(args1), args2=cs(args2), args3=cs(args3))
content = """
/*
* ===========================================================================
* SORRY FOR THIS MESS
*
* These macros are only for implementation. Not part of the API.
* ===========================================================================
*/
#define _RB3_GEN_INLINE_PROTO_REAL(_RB3_API, {params0}, {params1}, {params2}) \\
{tpl_inline_proto}
#define _RB3_GEN_NODECMP_PROTO_REAL(_RB3_API, {params0}, COMPARE_NODE, {params1}, {params2}, {params3}) \\
{tpl_nodecmp_proto}
#define _RB3_GEN_NODECMP_REAL(_RB3_API, {params0}, COMPARE_NODE, {params1}, {params2}, {params3}) \\
{tpl_nodecmp}
#define _RB3_GEN_IMPL_REAL_TYPES(_RB3_API) \\
{tpl_types}
#define _RB3_GEN_IMPL_REAL_INTERNAL(_RB3_API) \\
{tpl_internal}
#define _RB3_GEN_IMPL_REAL_NAVIGATE(_RB3_API) \\
{tpl_navigate}
#define _RB3_GEN_IMPL_REAL_BASIC(_RB3_API) \\
{tpl_basic}
#define _RB3_GEN_IMPL_REAL_IMPL(_RB3_API) \\
{tpl_impl}
#define _RB3_GEN_IMPL_HEADER_REAL(_RB3_API) \\
_RB3_GEN_IMPL_REAL_TYPES(_RB3_API) \\
_RB3_GEN_IMPL_REAL_INTERNAL(_RB3_API) \\
_RB3_GEN_IMPL_REAL_NAVIGATE(_RB3_API) \\
_RB3_GEN_IMPL_REAL_BASIC(_RB3_API)
#define _RB3_GEN_IMPL_REAL(_RB3_API) \\
_RB3_GEN_IMPL_HEADER_REAL(_RB3_API) \\
_RB3_GEN_IMPL_REAL_IMPL(_RB3_API)
""".format(params0=cs(params0), params1=cs(params1), params2=cs(params2), params3=cs(params3),
tpl_inline_proto=filetomacro('wrapper-inline-proto'),
tpl_nodecmp_proto=filetomacro('wrapper-nodecmp-proto'),
tpl_nodecmp=filetomacro('wrapper-nodecmp'),
tpl_types=filetomacro('types'),
tpl_basic=filetomacro('basic'),
tpl_navigate=filetomacro('navigate'),
tpl_internal=filetomacro('internal'),
tpl_impl=filetomacro('impl'))
def cat(filepath):
print(open(filepath).read())
def shell(cmd):
return subprocess.check_output(cmd, shell=True).decode()
print('/*')
cat('LICENSE.txt')
print('*/')
print()
print(r"""
/*
* WARNING:
*
* This file was autogenerated with gen-macros.py from files under templates/
*
* Autogenerated from git commit {git_commit}
*/
""".format(git_commit=shell('git rev-parse HEAD').strip()))
print("""
#ifndef RB3_GEN_HEADER
#define RB3_GEN_HEADER
""")
cat('templates/defs.c')
print(proxies)
print(content)
cat('templates/wrapper-defs.c')
print()
print('#endif /* RB3_GEN_HEADER */')
| jstimpfle/sil | rb3ptr/dist/gen-macros.py | Python | mit | 5,183 |
import math
from gi.repository import Gtk
from gaphas.item import Element as Box
from gaphas.tool.itemtool import (
DragState,
handle_at_point,
item_at_point,
item_tool,
on_drag_begin,
)
class MockEvent:
def __init__(self, modifiers=0):
self._modifiers = modifiers
def get_state(self):
return True, self._modifiers
class MockGesture:
def __init__(self, view, event=MockEvent()):
self._view = view
self._event = event
def get_widget(self):
return self._view
def get_last_event(self, _sequence):
return self._event
def get_current_event_state(self):
return self._event.get_state()[1]
def set_state(self, _state):
pass
def test_should_create_a_gesture(view):
tool = item_tool(view)
assert isinstance(tool, Gtk.Gesture)
def test_select_item_on_click(view, box, window):
tool = MockGesture(view)
drag_state = DragState()
selection = view.selection
on_drag_begin(tool, 0, 0, drag_state)
assert box is selection.focused_item
assert box in selection.selected_items
def test_start_move_handle_on_click(view, box, window):
tool = MockGesture(view)
drag_state = DragState()
on_drag_begin(tool, 0, 0, drag_state)
assert drag_state.moving
assert next(iter(drag_state.moving)).item is box
assert next(iter(drag_state.moving)).handle is box.handles()[0]
def test_get_item_at_point(view, box):
"""Hover tool only reacts on motion-notify events."""
box.width = 50
box.height = 50
assert item_at_point(view, (10, 10)) is box
assert item_at_point(view, (60, 10)) is None
def test_get_unselected_item_at_point(view, box):
box.width = 50
box.height = 50
view.selection.select_items(box)
assert item_at_point(view, (10, 10)) is box
assert item_at_point(view, (10, 10), exclude=(box,)) is None
def test_get_handle_at_point(view, canvas, connections):
box = Box(connections)
box.min_width = 20
box.min_height = 30
box.matrix.translate(20, 20)
box.matrix.rotate(math.pi / 1.5)
canvas.add(box)
i, h = handle_at_point(view, (20, 20))
assert i is box
assert h is box.handles()[0]
def test_get_handle_at_point_at_pi_div_2(view, canvas, connections):
box = Box(connections)
box.min_width = 20
box.min_height = 30
box.matrix.translate(20, 20)
box.matrix.rotate(math.pi / 2)
canvas.add(box)
i, h = handle_at_point(view, (20, 20))
assert i is box
assert h is box.handles()[0]
| amolenaar/gaphas | tests/test_tool_item.py | Python | lgpl-2.1 | 2,557 |
#!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files are usually organizing collections
and those can affect many different apis if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that is a subpath of the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):
print(" * {0} does not have a side-band test directory!".format(line))
else:
print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
['egrep', '-v', '(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
['egrep', '(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print "{0} was not found. Maybe this is a rename?".format(line)
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
results.append(result)
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def compress_testable_files(files):
lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit)
| F5Networks/f5-common-python | devtools/bin/create-test-list.py | Python | apache-2.0 | 4,268 |
# -*- coding: utf-8 -*-
'''
Created on 28.08.2015
@author: derChris
'''
import re
from collections import OrderedDict
from pydispatch import dispatcher
from abc import abstractmethod
from ezClasses.ezClasses import ezDict
####################
### SHARED GUI INFO
####################
class SharedGUIData():
_DEFAULT_X = 0
_DEFAULT_Y = 0
_DEFAULT_HEIGHT = 300
_DEFAULT_WIDTH = 220
def __init__(self, data = None):
if data and 'pos' in data:
self._pos = data['pos']
else:
self._pos = (self._DEFAULT_X, self._DEFAULT_Y)
if data and 'size' in data:
self._size = data['size']
else:
self._size = (self._DEFAULT_WIDTH, self._DEFAULT_HEIGHT)
def pos(self):
return self._pos
def size(self):
return self._size
def set_pos(self, pos):
self._pos = pos
def set_size(self, size):
self._size = size
def get_data(self):
"""
Get information to save object
"""
data = {}
if self._pos != (self._DEFAULT_X, self._DEFAULT_Y):
data.update({'pos': self._pos})
if self._size != (self._DEFAULT_WIDTH, self._DEFAULT_HEIGHT):
data.update({'size': self._size})
return data
####################
### CONNECTOR
####################
class Connector():
def __init__(self, name, style, pipe = None, node= None):
self._name = name
self._style = style
self._pipe = pipe
self._node = node
if style == 'input':
self._field = 'inputs'
elif style == 'output':
self._field = 'outputs'
else:
raise Exception('Invalid style: "' + style + '"')
def register_pipe(self, pipe):
self._pipe = pipe
def pipe(self):
return self._pipe
def clear(self):
if self._pipe:
self._pipe.delete()
self._pipe = None
def get_data(self):
if self._pipe:
return {self._name: id(self._pipe)}
else:
return {}
def sequence_data(self, pipemap):
if self._pipe:
if id(self._pipe) in pipemap:
# get variable
variable_id = pipemap[id(self._pipe)][0]
# delete pipe from map
if pipemap[id(self._pipe)][1] != self:
raise Exception('Pipe missregister!')
del(pipemap[id(self._pipe)])
                #TODO: don't send name
return {self._name: variable_id}, pipemap
elif self.style() == 'output':
pipemap.update({id(self._pipe): (RunSequenceAdditions._get_new_variable_id(pipemap), self._pipe.connector_at_output())})
return {self._name: pipemap[id(self._pipe)][0]}, pipemap
else:
raise Exception('Pipe unresolved: ' + self._name + ' ' + self._node.name() + ' (' + str(id(self._node)) + ')')
else:
return {self._name: None}, pipemap
def name(self):
return self._name
def style(self):
return self._style
def node(self):
return self._node
def field(self):
return self._field
def copy(self, node):
return Connector(self._name, self._style, None, node)
class ValueConnector(Connector):
_VALUE_UPDATED = 'VALUE_CONNECTOR_VALUE_UPDATED'
_PIPE_CONNECTION_STATUS = 'PIPE_CONNECTION_STATUS'
    _FLOAT_RE = re.compile('\ *[+-]?[0-9]+(\.[0-9]+)?([eE][+-][0-9]+)?\ *$')  # anchored: must match the whole value
    _INT_RE = re.compile('\ *[+-]?[0-9]+\ *$')  # anchored so e.g. "1.5" is not misread as an int
def __init__(self, name, style, default_value, pipe = None, node= None):
super(ValueConnector, self).__init__(name, style, pipe, node)
self._value = default_value
self._field = 'options'
def set_value(self, value):
self._value = value
dispatcher.send(signal = self._VALUE_UPDATED, sender= self, tag= self._name, value= value)
def value(self):
return self._value
def get_data(self):
data = {self._name: {'value': self._value}}
if self._pipe:
data[self._name].update({'pipe': id(self._pipe)})
return data
def register_pipe(self, pipe):
super().register_pipe(pipe)
dispatcher.send(self._PIPE_CONNECTION_STATUS, self, True if pipe else False)
def sequence_data(self, pipemap):
if self._pipe:
return super().sequence_data(pipemap)
else:
if re.match(self._INT_RE, self._value):
return {self._name: ('int', int(self._value))}, pipemap
if re.match(self._FLOAT_RE, self._value):
return {self._name: ('float', float(self._value))}, pipemap
return {self._name: ('string', self._value)}, pipemap
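    # For illustration (assumption, not in the original): with no pipe
    # attached, sequence_data infers a type from the literal text, e.g.
    #   "42"        -> ('int', 42)
    #   " -1.5e+3 " -> ('float', -1500.0)
    #   "hello"     -> ('string', 'hello')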
class DataConnector(Connector):
_ITERATOR_UPDATED = 'ITERATOR_UPDATED'
def __init__(self, info, pipe = None, node= None):
super().__init__(info[0], 'output', pipe, node)
self._array_size = info[1]
self._type = info[2]
self._iterate_via = None # Dimension
self._subconnectors = None
def display(self):
if self._iterate_via == None:
return None, None
else:
return self._iterate_via, self._array_size[self._iterate_via-1]
def increase_iteration_dimension(self):
if self._iterate_via == len(self._array_size):
self._iterate_via = None
elif not self._iterate_via:
self._iterate_via = 1
if self._type == 'struct':
self._create_subconnectors()
else:
self._iterate_via += 1
dispatcher.send(signal = self._ITERATOR_UPDATED, sender= self)
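    # For illustration (assumption, not in the original): with
    # _array_size == (3, 4), successive calls cycle the iteration dimension
    # None -> 1 -> 2 -> None, and display() reports (dimension, extent),
    # e.g. (1, 3), or (None, None) when not iterating.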
####################
### NODE TEMPLATE
####################
class NodeTemplate():
_NODE_CREATED = 'NODE_CREATED'
_NODE_DELETED = 'NODE_DELETED'
def __init__(self, system, data = None):
if data and 'gui_data' in data:
self._gui_data = SharedGUIData(data['gui_data'])
else:
self._gui_data = SharedGUIData()
self._name = ''
self._system = system
self._inputs = OrderedDict()
self._outputs = OrderedDict()
def __getitem__(self, field):
if '_' + field in self.__dict__ and isinstance(self.__dict__['_' + field], OrderedDict):
return self.__dict__['_' + field]
else:
raise KeyError(field + ' is not a valid field name.')
def connector(self, field, name):
if '_' + field in self.__dict__ and isinstance(self.__dict__['_' + field], OrderedDict):
if name in self.__dict__['_' + field]:
return self.__dict__['_' + field][name]
else:
raise KeyError(name + ' is not in ' + field)
else:
raise KeyError(field + ' is not a valid field name.')
### get info
def inputs(self):
return self._inputs
def input_connectors(self):
return tuple(self._inputs.values())
def output_connectors(self):
return tuple(self._outputs.values())
def outputs(self):
return self._outputs
def connectors(self):
inputs = list(self._inputs.values())
outputs = list(self._outputs.values())
return inputs + outputs
def following_nodes(self):
nodes = set()
for connector in self.output_connectors():
if connector.pipe():
nodes.add(connector.pipe().node_at_output())
return nodes
def preceding_nodes(self):
nodes = set()
for connector in self.input_connectors():
nodes.add(connector.pipe().node_at_input())
return nodes
def gui_data(self):
return self._gui_data
def get_data(self):
data = ezDict()
for connector in self.input_connectors():
data['fields']['inputs'].update(connector.get_data())
for connector in self.output_connectors():
data['fields']['outputs'].update(connector.get_data())
data.update({'id' : id(self),
'name': self.name(),
'type': self.__class__.__name__,
'gui_data': self._gui_data.get_data()})
return data.reduce() # remove 'empty' subdicts
@abstractmethod
def sequence_data(self, branchs, pipemap):
pass
def has_inputs(self):
for connector in self.input_connectors():
if connector.pipe():
return True
return False
def unresolved_input_pipes(self, pipe_map):
pipes = set()
for connector in self.connectors():
if connector.style() == 'input' and connector.pipe():
if id(connector.pipe()) not in pipe_map:
pipes.add(connector.pipe())
return pipes
def name(self):
return self._name
### manipulate
def delete(self):
self._system._remove_node(self)
for connector in self.connectors():
connector.clear()
dispatcher.send(signal = self._NODE_DELETED, sender = self)
def remove_node(self):
self._system.remove_node(self)
for connector in self.connectors():
connector.clear()
dispatcher.send(signal = self._NODE_DELETED, sender = self)
@abstractmethod
def create_sequence_item(self, nodes, pipe_map):
pass
@classmethod
def run_sequence_addition(cls):
return None
############################
### FUNCTION NODE TEMPLATE
############################
class FunctionNodeTemplate(NodeTemplate):
_FUNCTION_NODE_OPTIONS_VALUE_UPDATED = 'FUNCTION_NODE_OPTIONS_VALUE_UPDATED'
def __init__(self, name, system, data = None):
if data and 'path' in data:
self._folder = system.file_manager().root_folder().find_folder(data['path'])
elif data and 'folder_item' in data:
self._folder = data['folder_item']
else:
raise Exception('No path or folder given')
super(FunctionNodeTemplate, self).__init__(system, data)
self._name = name
self._options = OrderedDict()
self._properties = dict()
try:
self._read_from_file()
except FileNotFoundError:
self._name = '<' + self._name + '>'
if data and 'fields' in data and 'options' in data['fields']:
for tag, info in data['fields']['options'].items():
if tag in self._options and 'value' in info:
self._options[tag].set_value(info['value'])
### protected
@abstractmethod
def _read_from_file(self):
pass
### public
### get info
def get_data(self):
data = super(FunctionNodeTemplate, self).get_data()
for connector in self._options.values():
data['fields']['options'].update(connector.get_data())
data.update({'path': self._folder.relative_path()})
return data.reduce()
def sequence_data(self, branchs, pipemap):
data = ezDict()
for connector in self.input_connectors():
connector_data, pipemap = connector.sequence_data(pipemap)
data['fields']['inputs'].update(connector_data)
for connector in self._options.values():
connector_data, pipemap = connector.sequence_data(pipemap)
data['fields']['options'].update(connector_data)
### replace output pipe IDs with variable id
for connector in self.output_connectors():
connector_data, pipemap = connector.sequence_data(pipemap)
data['fields']['outputs'].update(connector_data)
data.update({'name': self.name(),
'path': self.path(),
'type': self.__class__.__name__})
return (self._PLUGIN, self._OPCODE, data), pipemap
def has_inputs(self):
if super().has_inputs():
return True
for connector in self._options.values():
if connector.pipe():
return True
return False
@classmethod
@abstractmethod
def file_extension(cls):
pass
def path(self):
if self._folder:
return self._folder.path()
else:
return None
def options(self):
return self._options
def option_connectors(self):
return tuple(self._options.values())
def update_options_value(self, tag, value):
for connector in self.connectors():
if connector.name() == tag:
connector.set_value(value)
def is_connected(self, pipe_id):
for connector in self.input_connectors():
if connector.pipe() and id(connector.pipe()) == pipe_id:
return True
return False
### manipulate object
def register_pipe(self, style, connector, pipe):
super(FunctionNodeTemplate, self).register_pipe(style, connector, pipe)
if style == 'options':
self._options[connector].register_pipe(pipe)
# def update_options_value(self, tag, value):
#
# self._options.update({tag: value})
#
# dispatcher.send(signal = self._FUNCTION_NODE_OPTIONS_VALUE_UPDATED, sender= self, tag= tag, value= value)
def remove_pipe(self, deleted_pipe):
### whole cleanup
super(FunctionNodeTemplate, self).remove_pipe(deleted_pipe)
self._options = {slot: pipe for slot, pipe in self._options.items() if pipe != deleted_pipe}
def connectors(self):
options = list(self._options.values())
return super().connectors() + options
class RunSequenceAdditions():
@classmethod
def _get_new_variable_id(cls, pipe_map):
# determine new variable_id (the lowest unused)
variable_id = 1
used_variable_ids = [value[0] for value in pipe_map.values()]
for variable_id in used_variable_ids:
variable_id += 1
return variable_id
def sequence_init_procedure(self, nodes):
pass
def sequence_pre_append_procedure(self, node):
pass
def sequence_post_append(self, node):
pass
def priority_node(self, nodes):
return None
def sequence_post_creation(self, sequence):
return sequence
def create_sequence_item(self, node, branchs, pipe_map):
code, pipe_map = node.sequence_data(branchs, pipe_map)
return code, pipe_map
if __name__ == '__main__':
def func(name, **kwargs):
print(name)
print(kwargs)
| ChrisCuts/fnode | src/PlugIns/NodeTemplates.py | Python | gpl-2.0 | 16,591 |
#!/usr/bin/env python
'''
SubSynco - a tool for synchronizing subtitle files
Copyright (C) 2015 da-mkay
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject
from gi.repository import Gst
from gi.repository import GstVideo
class TimeClbFilter(GstVideo.VideoFilter):
"""A VideoFilter that does no modifications but invokes a callback
method for each frames timestamp.
"""
# based on textoverlay.get_static_pad('video_sink')
# .query_caps(None).to_string())
_caps = ('video/x-raw, format=(string){ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA,'
' ARGB, ABGR, RGB, BGR, I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, '
'Y42B, Y444, NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }, '
'width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], '
'framerate=(fraction)[ 0/1, 2147483647/1 ]')
__gstmetadata__ = (
'TimeClbFilter plugin', # longname
'Filter/Video', # classification
("A VideoFilter that does no modifications but invokes a callback "
"method for each frame's timestamp."), # description
'[email protected]' # contact
)
_srctemplate = Gst.PadTemplate.new(
'src', # must be 'src' for a VideoFilter
Gst.PadDirection.SRC,
Gst.PadPresence.ALWAYS,
Gst.caps_from_string(_caps)
)
_sinktemplate = Gst.PadTemplate.new(
'sink', # must be 'sink' for a VideoFilter
Gst.PadDirection.SINK,
Gst.PadPresence.ALWAYS,
Gst.caps_from_string(_caps)
)
# register pad templates
__gsttemplates__ = (_sinktemplate, _srctemplate)
def __init__(self, timer_callback=None):
super(TimeClbFilter, self).__init__()
self.set_passthrough(True)
self.set_timer_callback(timer_callback)
def set_timer_callback(self, callback):
"""Set the callback function that will be called for each
frame's timestamp (in nanoseconds).
"""
self._timer_callback = callback
def do_transform_frame_ip(self, frame):
# Invoke timer callback (if any) and forward buffer to src
if (self._timer_callback is not None):
self._timer_callback(frame.buffer.pts) # nanos
return Gst.FlowReturn.OK
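# Illustrative usage sketch (assumption, not part of the original module):
# once the plugin registration at the bottom of this file has run, the filter
# can be dropped into a playbin pipeline via its "video-filter" property; the
# URI argument is a placeholder.
def _demo_timeclbfilter(uri):
    def on_frame(nanos):
        print('frame at %d ms' % (nanos // Gst.MSECOND))
    playbin = Gst.ElementFactory.make('playbin', None)
    flt = Gst.ElementFactory.make('timeclbfilter', None)
    flt.set_timer_callback(on_frame)
    playbin.set_property('uri', uri)
    playbin.set_property('video-filter', flt)
    playbin.set_state(Gst.State.PLAYING)
    return playbin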
def _init_plugin(plugin, userarg):
# Before registering the filter plugin we must ensure that the
# plugin's metadata and pad templates are set.
# Usually the data should be set automatically based on the
# __gstmetadata__ and __gsttemplates__ attributes. This works as
# expected on:
# Xubuntu 14.04
# (python-gst-1.0 [1.2.0-1], gstreamer1.0-x [1.2.4-1])
# Xubuntu 15.05
# (python-gst-1.0 [1.2.1-1.1], gstreamer1.0-x [1.4.5-1])
# However on Windows 7 running PyGI 3.14.0 AIO rev19 (including
# gstreamer 1.4.5) these values must be set explicitly using
# set_metadata and add_pad_template. If we would not set the values
# explicitly we would get the following warning/error:
# GStreamer-WARNING **: Element factory metadata for
# 'timeclbfilter' has no valid long-name field
# CRITICAL **: gst_base_transform_init: assertion 'pad_template !=
# NULL' failed
try:
TimeClbFilter.set_metadata(*TimeClbFilter.__gstmetadata__)
TimeClbFilter.add_pad_template(TimeClbFilter._sinktemplate)
TimeClbFilter.add_pad_template(TimeClbFilter._srctemplate)
except AttributeError:
pass
TimeClbFilterType = GObject.type_register(TimeClbFilter)
Gst.Element.register(plugin, 'timeclbfilter', 0, TimeClbFilterType)
return True
version = Gst.version()
Gst.Plugin.register_static_full(
version[0], # GST_VERSION_MAJOR
version[1], # GST_VERSION_MINOR
'TimeClbFilter plugin',
("A VideoFilter that does no modifications but invokes a callback method "
"for each frame's timestamp."),
_init_plugin,
'1.0',
'GPL',
'filter',
'subsynco.gst',
'http://subsynco.org',
None
)
| da-mkay/subsynco | src/subsynco/gst/filter.py | Python | gpl-3.0 | 4,647 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations:
"""DefaultSecurityRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.SecurityRuleListResult"]:
"""Gets all default security rules in a network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.SecurityRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'} # type: ignore
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
default_security_rule_name: str,
**kwargs: Any
) -> "_models.SecurityRule":
"""Get the specified default network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param default_security_rule_name: The name of the default security rule.
:type default_security_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.SecurityRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'} # type: ignore
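    # Illustrative usage sketch (assumed names): get() is a plain coroutine
    # returning a single deserialized SecurityRule model.
    #
    #   rule = await network_client.default_security_rules.get(
    #       "my-resource-group", "my-nsg", "AllowVnetInBound")
    #   print(rule.access, rule.direction)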
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/aio/operations/_default_security_rules_operations.py | Python | mit | 8,854 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
import sys
from types import GeneratorType
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataAPIException, NetworkDomainServicePlan
from libcloud.common.dimensiondata import DimensionDataServerCpuSpecification, DimensionDataServerDisk, DimensionDataServerVMWareTools
from libcloud.common.dimensiondata import DimensionDataTag, DimensionDataTagKey
from libcloud.common.dimensiondata import DimensionDataIpAddress, \
DimensionDataIpAddressList, DimensionDataChildIpAddressList, \
DimensionDataPortList, DimensionDataPort, DimensionDataChildPortList
from libcloud.common.dimensiondata import TYPES_URN
from libcloud.compute.drivers.dimensiondata import DimensionDataNodeDriver as DimensionData
from libcloud.compute.drivers.dimensiondata import DimensionDataNic
from libcloud.compute.base import Node, NodeAuthPassword, NodeLocation
from libcloud.test import MockHttp, unittest
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
from libcloud.utils.xml import fixxpath, findtext, findall
# InvalidRequestError is raised-for in tests below but was not imported above;
# it is assumed here to live in libcloud.common.exceptions.
from libcloud.common.exceptions import InvalidRequestError
class DimensionData_v2_4_Tests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.active_api_version = '2.4'
DimensionData.connectionCls.conn_class = DimensionDataMockHttp
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
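    # Note on the mock plumbing used throughout this class:
    # DimensionDataMockHttp (defined further down in this file) serves canned
    # XML fixtures, and its class-level `type` attribute acts as a fixture
    # selector -- e.g. setting it to 'PAGINATED' or 'ALLFILTERS' makes the
    # same endpoint return a differently named canned response, so each test
    # picks the server behaviour it wants before calling the driver.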
def test_invalid_region(self):
with self.assertRaises(ValueError):
DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
with self.assertRaises(InvalidCredsError):
self.driver.list_nodes()
def test_get_account_details(self):
DimensionDataMockHttp.type = None
ret = self.driver.connection.get_account_details()
self.assertEqual(ret.full_name, 'Test User')
self.assertEqual(ret.first_name, 'Test')
self.assertEqual(ret.email, '[email protected]')
def test_list_locations_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_locations()
self.assertEqual(len(ret), 5)
first_loc = ret[0]
self.assertEqual(first_loc.id, 'NA3')
self.assertEqual(first_loc.name, 'US - West')
self.assertEqual(first_loc.country, 'US')
def test_list_nodes_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertEqual(len(ret), 7)
def test_node_extras(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertTrue(isinstance(ret[0].extra['vmWareTools'], DimensionDataServerVMWareTools))
self.assertTrue(isinstance(ret[0].extra['cpu'], DimensionDataServerCpuSpecification))
self.assertTrue(isinstance(ret[0].extra['disks'], list))
self.assertTrue(isinstance(ret[0].extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(ret[0].extra['disks'][0].size_gb, 10)
self.assertTrue(isinstance(ret[1].extra['disks'], list))
self.assertTrue(isinstance(ret[1].extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(ret[1].extra['disks'][0].size_gb, 10)
def test_server_states(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes()
self.assertTrue(ret[0].state == 'running')
self.assertTrue(ret[1].state == 'starting')
self.assertTrue(ret[2].state == 'stopping')
self.assertTrue(ret[3].state == 'reconfiguring')
self.assertTrue(ret[4].state == 'running')
self.assertTrue(ret[5].state == 'terminated')
self.assertTrue(ret[6].state == 'stopped')
self.assertEqual(len(ret), 7)
def test_list_nodes_response_PAGINATED(self):
DimensionDataMockHttp.type = 'PAGINATED'
ret = self.driver.list_nodes()
self.assertEqual(len(ret), 9)
def test_paginated_mcp2_call_EMPTY(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'EMPTY'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server')
empty_node_list = []
for node_list in node_list_generator:
empty_node_list.extend(node_list)
self.assertTrue(len(empty_node_list) == 0)
def test_paginated_mcp2_call_PAGED_THEN_EMPTY(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'PAGED_THEN_EMPTY'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server')
final_node_list = []
for node_list in node_list_generator:
final_node_list.extend(node_list)
self.assertTrue(len(final_node_list) == 2)
def test_paginated_mcp2_call_with_page_size(self):
# cache org
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'PAGESIZE50'
node_list_generator = self.driver.connection.paginated_request_with_orgId_api_2('server/server', page_size=50)
self.assertTrue(isinstance(node_list_generator, GeneratorType))
    # We're making sure here that the filters make it into the request URL
# See _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS for asserts
def test_list_nodes_response_strings_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
ret = self.driver.list_nodes(ex_location='fake_loc', ex_name='fake_name',
ex_ipv6='fake_ipv6', ex_ipv4='fake_ipv4', ex_vlan='fake_vlan',
ex_image='fake_image', ex_deployed=True,
ex_started=True, ex_state='fake_state',
ex_network='fake_network', ex_network_domain='fake_network_domain')
self.assertTrue(isinstance(ret, list))
self.assertEqual(len(ret), 7)
node = ret[3]
self.assertTrue(isinstance(node.extra['disks'], list))
self.assertTrue(isinstance(node.extra['disks'][0], DimensionDataServerDisk))
self.assertEqual(node.size.id, '1')
self.assertEqual(node.image.id, '3ebf3c0f-90fe-4a8b-8585-6e65b316592c')
self.assertEqual(node.image.name, 'WIN2008S/32')
disk = node.extra['disks'][0]
self.assertEqual(disk.id, "c2e1f199-116e-4dbc-9960-68720b832b0a")
self.assertEqual(disk.scsi_id, 0)
self.assertEqual(disk.size_gb, 50)
self.assertEqual(disk.speed, "STANDARD")
self.assertEqual(disk.state, "NORMAL")
def test_list_nodes_response_LOCATION(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_locations()
first_loc = ret[0]
ret = self.driver.list_nodes(ex_location=first_loc)
for node in ret:
self.assertEqual(node.extra['datacenterId'], 'NA3')
def test_list_nodes_response_LOCATION_STR(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_nodes(ex_location='NA3')
for node in ret:
self.assertEqual(node.extra['datacenterId'], 'NA3')
def test_list_sizes_response(self):
DimensionDataMockHttp.type = None
ret = self.driver.list_sizes()
self.assertEqual(len(ret), 1)
size = ret[0]
self.assertEqual(size.name, 'default')
def test_reboot_node_response(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = node.reboot()
self.assertTrue(ret is True)
def test_reboot_node_response_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
node.reboot()
def test_destroy_node_response(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = node.destroy()
self.assertTrue(ret is True)
def test_destroy_node_response_RESOURCE_BUSY(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
node.destroy()
def test_list_images(self):
images = self.driver.list_images()
self.assertEqual(len(images), 3)
self.assertEqual(images[0].name, 'RedHat 6 64-bit 2 CPU')
self.assertEqual(images[0].id, 'c14b1a46-2428-44c1-9c1a-b20e6418d08c')
self.assertEqual(images[0].extra['location'].id, 'NA9')
self.assertEqual(images[0].extra['cpu'].cpu_count, 2)
self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')
def test_clean_failed_deployment_response_with_node(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_clean_failed_deployment(node)
self.assertTrue(ret is True)
def test_clean_failed_deployment_response_with_node_id(self):
node = 'e75ead52-692f-4314-8725-c8a4f4d13a87'
ret = self.driver.ex_clean_failed_deployment(node)
self.assertTrue(ret is True)
def test_ex_list_customer_images(self):
images = self.driver.ex_list_customer_images()
self.assertEqual(len(images), 3)
self.assertEqual(images[0].name, 'ImportedCustomerImage')
self.assertEqual(images[0].id, '5234e5c7-01de-4411-8b6e-baeb8d91cf5d')
self.assertEqual(images[0].extra['location'].id, 'NA9')
self.assertEqual(images[0].extra['cpu'].cpu_count, 4)
self.assertEqual(images[0].extra['OS_displayName'], 'REDHAT6/64')
def test_create_mcp1_node_optional_param(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
network = self.driver.ex_list_networks()[0]
cpu_spec = DimensionDataServerCpuSpecification(cpu_count='4',
cores_per_socket='2',
performance='STANDARD')
disks = [DimensionDataServerDisk(scsi_id='0', speed='HIGHPERFORMANCE')]
node = self.driver.create_node(name='test2', image=image, auth=root_pw,
ex_description='test2 node',
ex_network=network,
ex_is_started=False,
ex_memory_gb=8,
ex_disks=disks,
ex_cpu_specification=cpu_spec,
ex_primary_dns='10.0.0.5',
ex_secondary_dns='10.0.0.6'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_mcp1_node_response_no_pass_random_gen(self):
image = self.driver.list_images()[0]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node',
ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_windows(self):
image = self.driver.ex_list_customer_images()[1]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_windows_STR(self):
image = self.driver.ex_list_customer_images()[1].id
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' in node.extra)
def test_create_mcp1_node_response_no_pass_customer_linux(self):
image = self.driver.ex_list_customer_images()[0]
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' not in node.extra)
def test_create_mcp1_node_response_no_pass_customer_linux_STR(self):
image = self.driver.ex_list_customer_images()[0].id
network = self.driver.ex_list_networks()[0]
node = self.driver.create_node(name='test2', image=image, auth=None,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
self.assertTrue('password' not in node.extra)
def test_create_mcp1_node_response_STR(self):
rootPw = 'pass123'
image = self.driver.list_images()[0].id
network = self.driver.ex_list_networks()[0].id
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node', ex_network=network,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_response_network_domain(self):
rootPw = NodeAuthPassword('pass123')
location = self.driver.ex_get_location_by_id('NA9')
image = self.driver.list_images(location=location)[0]
network_domain = self.driver.ex_list_network_domains(location=location)[0]
vlan = self.driver.ex_list_vlans(location=location)[0]
cpu = DimensionDataServerCpuSpecification(
cpu_count=4,
cores_per_socket=1,
performance='HIGHPERFORMANCE'
)
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node',
ex_network_domain=network_domain,
ex_vlan=vlan,
ex_is_started=False, ex_cpu_specification=cpu,
ex_memory_gb=4)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_response_network_domain_STR(self):
rootPw = NodeAuthPassword('pass123')
location = self.driver.ex_get_location_by_id('NA9')
image = self.driver.list_images(location=location)[0]
network_domain = self.driver.ex_list_network_domains(location=location)[0].id
vlan = self.driver.ex_list_vlans(location=location)[0].id
cpu = DimensionDataServerCpuSpecification(
cpu_count=4,
cores_per_socket=1,
performance='HIGHPERFORMANCE'
)
node = self.driver.create_node(name='test2', image=image, auth=rootPw,
ex_description='test2 node',
ex_network_domain=network_domain,
ex_vlan=vlan,
ex_is_started=False, ex_cpu_specification=cpu,
ex_memory_gb=4)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_mcp1_node_no_network(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(InvalidRequestError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network=None,
ex_is_started=False)
def test_create_node_mcp1_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network='fakenetwork',
ex_primary_ipv4='10.0.0.1',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp1_network(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network='fakenetwork',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp2_vlan(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_vlan='fakevlan',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_mcp2_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_network_domain_no_vlan_or_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_is_started=False)
def test_create_node_response(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_ms_time_zone(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan',
ex_microsoft_time_zone='040'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
    def test_create_node_ambiguous_mcps_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_network='fakenetwork',
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_no_network_domain_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_no_primary_nic_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain'
)
def test_create_node_primary_vlan_nic(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_vlan='fakevlan',
ex_primary_nic_network_adapter='v1000'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_primary_ipv4(self):
rootPw = 'pass123'
image = self.driver.list_images()[0]
node = self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1'
)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_both_primary_nic_and_vlan_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(
name='test3',
image=image,
auth=rootPw,
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_primary_nic_vlan='fakevlan'
)
def test_create_node_cpu_specification(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
cpu_spec = DimensionDataServerCpuSpecification(cpu_count='4',
cores_per_socket='2',
performance='STANDARD')
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_cpu_specification=cpu_spec)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_memory(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_memory_gb=8)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_disks(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
disks = [DimensionDataServerDisk(scsi_id='0', speed='HIGHPERFORMANCE')]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_disks=disks)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_disks_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
disks = 'blah'
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_disks=disks)
def test_create_node_ipv4_gateway(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_is_started=False,
ex_ipv4_gateway='10.2.2.2')
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_network_domain_no_vlan_no_ipv4_fail(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_is_started=False)
def test_create_node_mcp2_additional_nics_legacy(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
additional_vlans = ['fakevlan1', 'fakevlan2']
additional_ipv4 = ['10.0.0.2', '10.0.0.3']
node = self.driver.create_node(
name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_additional_nics_vlan=additional_vlans,
ex_additional_nics_ipv4=additional_ipv4,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_bad_additional_nics_ipv4(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_vlan='fake_vlan',
ex_additional_nics_ipv4='badstring',
ex_is_started=False)
def test_create_node_additional_nics(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(vlan='fake_vlan',
network_adapter_name='v1000')
nic2 = DimensionDataNic(private_ip_v4='10.1.1.2',
network_adapter_name='v1000')
additional_nics = [nic1, nic2]
node = self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_create_node_additional_nics_vlan_ipv4_coexist_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(private_ip_v4='10.1.1.1', vlan='fake_vlan',
network_adapter_name='v1000')
nic2 = DimensionDataNic(private_ip_v4='10.1.1.2', vlan='fake_vlan2',
network_adapter_name='v1000')
additional_nics = [nic1, nic2]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False
)
def test_create_node_additional_nics_invalid_input_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
additional_nics = 'blah'
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False
)
def test_create_node_additional_nics_vlan_ipv4_not_exist_fail(self):
root_pw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
nic1 = DimensionDataNic(network_adapter_name='v1000')
nic2 = DimensionDataNic(network_adapter_name='v1000')
additional_nics = [nic1, nic2]
with self.assertRaises(ValueError):
self.driver.create_node(name='test2',
image=image,
auth=root_pw,
ex_description='test2 node',
ex_network_domain='fakenetworkdomain',
ex_primary_nic_private_ipv4='10.0.0.1',
ex_additional_nics=additional_nics,
ex_is_started=False)
def test_create_node_bad_additional_nics_vlan(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
with self.assertRaises(TypeError):
self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test2 node',
ex_network_domain='fake_network_domain',
ex_vlan='fake_vlan',
ex_additional_nics_vlan='badstring',
ex_is_started=False)
def test_create_node_mcp2_indicate_dns(self):
rootPw = NodeAuthPassword('pass123')
image = self.driver.list_images()[0]
node = self.driver.create_node(name='test2',
image=image,
auth=rootPw,
ex_description='test node dns',
ex_network_domain='fakenetworkdomain',
ex_primary_ipv4='10.0.0.1',
ex_primary_dns='8.8.8.8',
ex_secondary_dns='8.8.4.4',
ex_is_started=False)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
self.assertEqual(node.extra['status'].action, 'DEPLOY_SERVER')
def test_ex_shutdown_graceful(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_shutdown_graceful(node)
self.assertTrue(ret is True)
def test_ex_shutdown_graceful_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_shutdown_graceful(node)
def test_ex_start_node(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_start_node(node)
self.assertTrue(ret is True)
def test_ex_start_node_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_start_node(node)
def test_ex_power_off(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_power_off(node)
self.assertTrue(ret is True)
def test_ex_update_vm_tools(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_update_vm_tools(node)
self.assertTrue(ret is True)
def test_ex_power_off_INPROGRESS(self):
DimensionDataMockHttp.type = 'INPROGRESS'
node = Node(id='11', name=None, state='STOPPING',
public_ips=None, private_ips=None, driver=self.driver)
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_power_off(node)
def test_ex_reset(self):
node = Node(id='11', name=None, state=None,
public_ips=None, private_ips=None, driver=self.driver)
ret = self.driver.ex_reset(node)
self.assertTrue(ret is True)
def test_ex_attach_node_to_vlan(self):
node = self.driver.ex_get_node_by_id('e75ead52-692f-4314-8725-c8a4f4d13a87')
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
ret = self.driver.ex_attach_node_to_vlan(node, vlan)
self.assertTrue(ret is True)
def test_ex_destroy_nic(self):
node = self.driver.ex_destroy_nic('a202e51b-41c0-4cfc-add0-b1c62fc0ecf6')
self.assertTrue(node)
def test_list_networks(self):
nets = self.driver.list_networks()
self.assertEqual(nets[0].name, 'test-net1')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_create_network(self):
location = self.driver.ex_get_location_by_id('NA9')
net = self.driver.ex_create_network(location, "Test Network", "test")
self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
self.assertEqual(net.name, "Test Network")
def test_ex_create_network_NO_DESCRIPTION(self):
location = self.driver.ex_get_location_by_id('NA9')
net = self.driver.ex_create_network(location, "Test Network")
self.assertEqual(net.id, "208e3a8e-9d2f-11e2-b29c-001517c4643e")
self.assertEqual(net.name, "Test Network")
def test_ex_delete_network(self):
net = self.driver.ex_list_networks()[0]
result = self.driver.ex_delete_network(net)
self.assertTrue(result)
def test_ex_rename_network(self):
net = self.driver.ex_list_networks()[0]
result = self.driver.ex_rename_network(net, "barry")
self.assertTrue(result)
def test_ex_create_network_domain(self):
location = self.driver.ex_get_location_by_id('NA9')
plan = NetworkDomainServicePlan.ADVANCED
net = self.driver.ex_create_network_domain(location=location,
name='test',
description='test',
service_plan=plan)
self.assertEqual(net.name, 'test')
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')
def test_ex_create_network_domain_NO_DESCRIPTION(self):
location = self.driver.ex_get_location_by_id('NA9')
plan = NetworkDomainServicePlan.ADVANCED
net = self.driver.ex_create_network_domain(location=location,
name='test',
service_plan=plan)
self.assertEqual(net.name, 'test')
        self.assertEqual(net.id, 'f14a871f-9a25-470c-aef8-51e13202e1aa')
def test_ex_get_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(net.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(net.description, 'test2')
self.assertEqual(net.name, 'test')
def test_ex_update_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
net.name = 'new name'
net2 = self.driver.ex_update_network_domain(net)
self.assertEqual(net2.name, 'new name')
def test_ex_delete_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
result = self.driver.ex_delete_network_domain(net)
self.assertTrue(result)
def test_ex_list_networks(self):
nets = self.driver.ex_list_networks()
self.assertEqual(nets[0].name, 'test-net1')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_network_domains(self):
nets = self.driver.ex_list_network_domains()
self.assertEqual(nets[0].name, 'Aurora')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_network_domains_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
nets = self.driver.ex_list_network_domains(location='fake_location', name='fake_name',
service_plan='fake_plan', state='fake_state')
self.assertEqual(nets[0].name, 'Aurora')
self.assertTrue(isinstance(nets[0].location, NodeLocation))
def test_ex_list_vlans(self):
vlans = self.driver.ex_list_vlans()
self.assertEqual(vlans[0].name, "Primary")
def test_ex_list_vlans_ALLFILTERS(self):
DimensionDataMockHttp.type = 'ALLFILTERS'
vlans = self.driver.ex_list_vlans(location='fake_location', network_domain='fake_network_domain',
name='fake_name', ipv4_address='fake_ipv4', ipv6_address='fake_ipv6', state='fake_state')
self.assertEqual(vlans[0].name, "Primary")
    def test_ex_create_vlan(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
vlan = self.driver.ex_create_vlan(network_domain=net,
name='test',
private_ipv4_base_address='10.3.4.0',
private_ipv4_prefix_size='24',
description='test vlan')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
    def test_ex_create_vlan_NO_DESCRIPTION(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
vlan = self.driver.ex_create_vlan(network_domain=net,
name='test',
private_ipv4_base_address='10.3.4.0',
private_ipv4_prefix_size='24')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
def test_ex_get_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
self.assertEqual(vlan.id, '0e56433f-d808-4669-821d-812769517ff8')
self.assertEqual(vlan.description, 'test2')
self.assertEqual(vlan.status, 'NORMAL')
self.assertEqual(vlan.name, 'Production VLAN')
self.assertEqual(vlan.private_ipv4_range_address, '10.0.3.0')
self.assertEqual(vlan.private_ipv4_range_size, 24)
self.assertEqual(vlan.ipv6_range_size, 64)
self.assertEqual(vlan.ipv6_range_address, '2607:f480:1111:1153:0:0:0:0')
self.assertEqual(vlan.ipv4_gateway, '10.0.3.1')
self.assertEqual(vlan.ipv6_gateway, '2607:f480:1111:1153:0:0:0:1')
def test_ex_wait_for_state(self):
self.driver.ex_wait_for_state('NORMAL',
self.driver.ex_get_vlan,
vlan_id='0e56433f-d808-4669-821d-812769517ff8')
def test_ex_wait_for_state_NODE(self):
self.driver.ex_wait_for_state('running',
self.driver.ex_get_node_by_id,
id='e75ead52-692f-4314-8725-c8a4f4d13a87')
def test_ex_wait_for_state_FAIL(self):
with self.assertRaises(DimensionDataAPIException) as context:
self.driver.ex_wait_for_state('starting',
self.driver.ex_get_node_by_id,
id='e75ead52-692f-4314-8725-c8a4f4d13a87',
timeout=2
)
self.assertEqual(context.exception.code, 'running')
self.assertTrue('timed out' in context.exception.msg)
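    # As the three tests above exercise it, ex_wait_for_state polls the given
    # getter until the returned object reaches the requested state or the
    # timeout elapses; on timeout it raises DimensionDataAPIException with the
    # last observed state as the exception code and a 'timed out' message.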
def test_ex_update_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
vlan.name = 'new name'
vlan2 = self.driver.ex_update_vlan(vlan)
self.assertEqual(vlan2.name, 'new name')
def test_ex_delete_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
result = self.driver.ex_delete_vlan(vlan)
self.assertTrue(result)
def test_ex_expand_vlan(self):
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
vlan.private_ipv4_range_size = '23'
vlan = self.driver.ex_expand_vlan(vlan)
self.assertEqual(vlan.private_ipv4_range_size, '23')
def test_ex_add_public_ip_block_to_network_domain(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
block = self.driver.ex_add_public_ip_block_to_network_domain(net)
self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
def test_ex_list_public_ip_blocks(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
blocks = self.driver.ex_list_public_ip_blocks(net)
self.assertEqual(blocks[0].base_ip, '168.128.4.18')
self.assertEqual(blocks[0].size, '2')
self.assertEqual(blocks[0].id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(blocks[0].location.id, 'NA9')
self.assertEqual(blocks[0].network_domain.id, net.id)
def test_ex_get_public_ip_block(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(block.base_ip, '168.128.4.18')
self.assertEqual(block.size, '2')
self.assertEqual(block.id, '9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
self.assertEqual(block.location.id, 'NA9')
self.assertEqual(block.network_domain.id, net.id)
def test_ex_delete_public_ip_block(self):
block = self.driver.ex_get_public_ip_block('9945dc4a-bdce-11e4-8c14-b8ca3a5d9ef8')
result = self.driver.ex_delete_public_ip_block(block)
self.assertTrue(result)
def test_ex_list_firewall_rules(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
self.assertEqual(rules[0].id, '756cba02-b0bc-48f4-aea5-9445870b6148')
self.assertEqual(rules[0].network_domain.id, '8cdfd607-f429-4df6-9352-162cfc0891be')
self.assertEqual(rules[0].name, 'CCDEFAULT.BlockOutboundMailIPv4')
self.assertEqual(rules[0].action, 'DROP')
self.assertEqual(rules[0].ip_version, 'IPV4')
self.assertEqual(rules[0].protocol, 'TCP')
self.assertEqual(rules[0].source.ip_address, 'ANY')
self.assertTrue(rules[0].source.any_ip)
self.assertTrue(rules[0].destination.any_ip)
def test_ex_create_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[0], 'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_specific_source_ip(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
rule = self.driver.ex_create_firewall_rule(net, specific_source_ip_rule, 'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_source_ip(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = \
list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
specific_source_ip_rule.source.any_ip = False
specific_source_ip_rule.source.ip_address = '10.0.0.1'
specific_source_ip_rule.source.ip_prefix_size = '15'
rule = self.driver.ex_create_firewall_rule(net,
specific_source_ip_rule,
'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_with_any_ip(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
specific_source_ip_rule = \
list(filter(lambda x: x.name == 'SpecificSourceIP',
rules))[0]
specific_source_ip_rule.source.any_ip = True
rule = self.driver.ex_create_firewall_rule(net,
specific_source_ip_rule,
'FIRST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_ip_prefix_size(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.address_list_id = None
rule.source.any_ip = False
rule.source.ip_address = '10.2.1.1'
rule.source.ip_prefix_size = '10'
rule.destination.address_list_id = None
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = '20'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_address_list(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.address_list_id = '12345'
rule.destination.address_list_id = '12345'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_port_list(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.port_list_id = '12345'
rule.destination.port_list_id = '12345'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_list_firewall_rules(net)[0]
rule.source.port_list_id = None
rule.source.port_begin = '8000'
rule.source.port_end = '8005'
rule.destination.port_list_id = None
rule.destination.port_begin = '7000'
rule.destination.port_end = '7005'
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_ALL_VALUES(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
for rule in rules:
self.driver.ex_create_firewall_rule(net, rule, 'LAST')
def test_ex_create_firewall_rule_WITH_POSITION_RULE(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[-2], 'BEFORE', rules[-1])
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_WITH_POSITION_RULE_STR(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
rule = self.driver.ex_create_firewall_rule(net, rules[-2], 'BEFORE', 'RULE_WITH_SOURCE_AND_DEST')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_create_firewall_rule_FAIL_POSITION(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
with self.assertRaises(ValueError):
self.driver.ex_create_firewall_rule(net, rules[0], 'BEFORE')
def test_ex_create_firewall_rule_FAIL_POSITION_WITH_RULE(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_firewall_rules(net)
with self.assertRaises(ValueError):
self.driver.ex_create_firewall_rule(net, rules[0], 'LAST', 'RULE_WITH_SOURCE_AND_DEST')
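    # Positioning semantics exercised by the tests above: 'FIRST' and 'LAST'
    # are absolute and must not be combined with a relative rule, while
    # 'BEFORE' (and, presumably, 'AFTER') requires one -- given either as a
    # rule object or as a rule name.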
def test_ex_get_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
self.assertEqual(rule.id, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
def test_ex_set_firewall_rule_state(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
result = self.driver.ex_set_firewall_rule_state(rule, False)
self.assertTrue(result)
def test_ex_delete_firewall_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
result = self.driver.ex_delete_firewall_rule(rule)
self.assertTrue(result)
def test_ex_edit_firewall_rule(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.any_ip = True
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_ipaddresslist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.address_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
rule.source.any_ip = False
rule.source.ip_address = '10.0.0.1'
rule.source.ip_prefix_size = 10
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_ipaddresslist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.address_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = 10
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_ipaddress(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.address_list_id = None
rule.source.any_ip = False
rule.source.ip_address = '10.0.0.1'
rule.source.ip_prefix_size = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_ipaddress(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.address_list_id = None
rule.destination.any_ip = False
rule.destination.ip_address = '10.0.0.1'
rule.destination.ip_prefix_size = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_with_relative_rule(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
placement_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
result = self.driver.ex_edit_firewall_rule(
rule=rule, position='BEFORE',
relative_rule_for_position=placement_rule)
self.assertTrue(result)
def test_ex_edit_firewall_rule_with_relative_rule_by_name(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
placement_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
result = self.driver.ex_edit_firewall_rule(
rule=rule, position='BEFORE',
relative_rule_for_position=placement_rule.name)
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_portlist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.port_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_source_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.source.port_list_id = None
rule.source.port_begin = '3'
rule.source.port_end = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_portlist(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.port_list_id = '802abc9f-45a7-4efb-9d5a-810082368222'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_destination_port(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
rule.destination.port_list_id = None
rule.destination.port_begin = '3'
rule.destination.port_end = '10'
result = self.driver.ex_edit_firewall_rule(rule=rule, position='LAST')
self.assertTrue(result)
def test_ex_edit_firewall_rule_invalid_position_fail(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
with self.assertRaises(ValueError):
self.driver.ex_edit_firewall_rule(rule=rule, position='BEFORE')
def test_ex_edit_firewall_rule_invalid_position_relative_rule_fail(self):
net = self.driver.ex_get_network_domain(
'8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_firewall_rule(
net, 'd0a20f59-77b9-4f28-a63b-e58496b73a6c')
relative_rule = self.driver.ex_list_firewall_rules(
network_domain=net)[-1]
with self.assertRaises(ValueError):
self.driver.ex_edit_firewall_rule(rule=rule, position='FIRST',
relative_rule_for_position=relative_rule)
def test_ex_create_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_create_nat_rule(net, '1.2.3.4', '4.3.2.1')
self.assertEqual(rule.id, 'd31c2db0-be6b-4d50-8744-9a7a534b5fba')
def test_ex_list_nat_rules(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rules = self.driver.ex_list_nat_rules(net)
self.assertEqual(rules[0].id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rules[0].internal_ip, '10.0.0.15')
self.assertEqual(rules[0].external_ip, '165.180.12.18')
def test_ex_get_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rule.id, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
self.assertEqual(rule.internal_ip, '10.0.0.16')
self.assertEqual(rule.external_ip, '165.180.12.19')
def test_ex_delete_nat_rule(self):
net = self.driver.ex_get_network_domain('8cdfd607-f429-4df6-9352-162cfc0891be')
rule = self.driver.ex_get_nat_rule(net, '2187a636-7ebb-49a1-a2ff-5d617f496dce')
result = self.driver.ex_delete_nat_rule(rule)
self.assertTrue(result)
def test_ex_enable_monitoring(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_enable_monitoring(node, "ADVANCED")
self.assertTrue(result)
def test_ex_disable_monitoring(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_disable_monitoring(node)
self.assertTrue(result)
def test_ex_change_monitoring_plan(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_update_monitoring_plan(node, "ESSENTIALS")
self.assertTrue(result)
def test_ex_add_storage_to_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_add_storage_to_node(node, 30, 'PERFORMANCE')
self.assertTrue(result)
def test_ex_remove_storage_from_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_remove_storage_from_node(node, 0)
self.assertTrue(result)
def test_ex_change_storage_speed(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_change_storage_speed(node, 1, 'PERFORMANCE')
self.assertTrue(result)
def test_ex_change_storage_size(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_change_storage_size(node, 1, 100)
self.assertTrue(result)
def test_ex_clone_node_to_image(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_clone_node_to_image(node, 'my image', 'a description')
self.assertTrue(result)
def test_ex_update_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_update_node(node, 'my new name', 'a description', 2, 4048)
self.assertTrue(result)
def test_ex_reconfigure_node(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_reconfigure_node(node, 4, 4, 1, 'HIGHPERFORMANCE')
self.assertTrue(result)
def test_ex_get_location_by_id(self):
location = self.driver.ex_get_location_by_id('NA9')
        self.assertEqual(location.id, 'NA9')
def test_ex_get_location_by_id_NO_LOCATION(self):
location = self.driver.ex_get_location_by_id(None)
self.assertIsNone(location)
def test_ex_get_base_image_by_id(self):
image_id = self.driver.list_images()[0].id
image = self.driver.ex_get_base_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'UNIX')
def test_ex_get_customer_image_by_id(self):
image_id = self.driver.ex_list_customer_images()[1].id
image = self.driver.ex_get_customer_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'WINDOWS')
def test_ex_get_image_by_id_base_img(self):
image_id = self.driver.list_images()[1].id
image = self.driver.ex_get_base_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'WINDOWS')
def test_ex_get_image_by_id_customer_img(self):
image_id = self.driver.ex_list_customer_images()[0].id
image = self.driver.ex_get_customer_image_by_id(image_id)
self.assertEqual(image.extra['OS_type'], 'UNIX')
def test_ex_get_image_by_id_customer_FAIL(self):
image_id = 'FAKE_IMAGE_ID'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_get_base_image_by_id(image_id)
def test_ex_create_anti_affinity_rule(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule([node_list[0], node_list[1]])
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_TUPLE(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule((node_list[0], node_list[1]))
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_TUPLE_STR(self):
node_list = self.driver.list_nodes()
success = self.driver.ex_create_anti_affinity_rule((node_list[0].id, node_list[1].id))
self.assertTrue(success)
def test_ex_create_anti_affinity_rule_FAIL_STR(self):
node_list = 'string'
with self.assertRaises(TypeError):
self.driver.ex_create_anti_affinity_rule(node_list)
def test_ex_create_anti_affinity_rule_FAIL_EXISTING(self):
node_list = self.driver.list_nodes()
DimensionDataMockHttp.type = 'FAIL_EXISTING'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_create_anti_affinity_rule((node_list[0], node_list[1]))
def test_ex_delete_anti_affinity_rule(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
success = self.driver.ex_delete_anti_affinity_rule(rule)
self.assertTrue(success)
def test_ex_delete_anti_affinity_rule_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
success = self.driver.ex_delete_anti_affinity_rule(rule.id)
self.assertTrue(success)
def test_ex_delete_anti_affinity_rule_FAIL(self):
net_domain = self.driver.ex_list_network_domains()[0]
rule = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)[0]
DimensionDataMockHttp.type = 'FAIL'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_delete_anti_affinity_rule(rule)
def test_ex_list_anti_affinity_rules_NETWORK_DOMAIN(self):
net_domain = self.driver.ex_list_network_domains()[0]
rules = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_NETWORK(self):
network = self.driver.list_networks()[0]
rules = self.driver.ex_list_anti_affinity_rules(network=network)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_NODE(self):
node = self.driver.list_nodes()[0]
rules = self.driver.ex_list_anti_affinity_rules(node=node)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_PAGINATED(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'PAGINATED'
rules = self.driver.ex_list_anti_affinity_rules(network_domain=net_domain)
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 4)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_ALLFILTERS(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'ALLFILTERS'
        rules = self.driver.ex_list_anti_affinity_rules(
            network_domain=net_domain,
            filter_id='FAKE_ID',
            filter_state='FAKE_STATE')
self.assertTrue(isinstance(rules, list))
self.assertEqual(len(rules), 2)
self.assertTrue(isinstance(rules[0].id, str))
self.assertTrue(isinstance(rules[0].node_list, list))
def test_ex_list_anti_affinity_rules_BAD_ARGS(self):
with self.assertRaises(ValueError):
            self.driver.ex_list_anti_affinity_rules(
                network='fake_network',
                network_domain='fake_network_domain')
def test_ex_create_tag_key(self):
success = self.driver.ex_create_tag_key('MyTestKey')
self.assertTrue(success)
def test_ex_create_tag_key_ALLPARAMS(self):
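        # Resolve the org id before switching the mock type; once the type is
        # set, every request (including the initial /myaccount lookup) is
        # routed to the suffixed handler variants.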
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLPARAMS'
        success = self.driver.ex_create_tag_key('MyTestKey',
                                                description="Test Key Desc.",
                                                value_required=False,
                                                display_on_report=False)
self.assertTrue(success)
def test_ex_create_tag_key_BADREQUEST(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'BADREQUEST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_create_tag_key('MyTestKey')
def test_ex_list_tag_keys(self):
tag_keys = self.driver.ex_list_tag_keys()
self.assertTrue(isinstance(tag_keys, list))
self.assertTrue(isinstance(tag_keys[0], DimensionDataTagKey))
self.assertTrue(isinstance(tag_keys[0].id, str))
def test_ex_list_tag_keys_ALLFILTERS(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLFILTERS'
        self.driver.ex_list_tag_keys(id='fake_id', name='fake_name',
                                     value_required=False,
                                     display_on_report=False)
def test_ex_get_tag_by_id(self):
tag = self.driver.ex_get_tag_key_by_id('d047c609-93d7-4bc5-8fc9-732c85840075')
self.assertTrue(isinstance(tag, DimensionDataTagKey))
def test_ex_get_tag_by_id_NOEXIST(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'NOEXIST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_get_tag_key_by_id('d047c609-93d7-4bc5-8fc9-732c85840075')
def test_ex_get_tag_by_name(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'SINGLE'
tag = self.driver.ex_get_tag_key_by_name('LibcloudTest')
self.assertTrue(isinstance(tag, DimensionDataTagKey))
def test_ex_get_tag_by_name_NOEXIST(self):
with self.assertRaises(ValueError):
self.driver.ex_get_tag_key_by_name('LibcloudTest')
def test_ex_modify_tag_key_NAME(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NAME'
success = self.driver.ex_modify_tag_key(tag_key, name='NewName')
self.assertTrue(success)
def test_ex_modify_tag_key_NOTNAME(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOTNAME'
        success = self.driver.ex_modify_tag_key(tag_key,
                                                description='NewDesc',
                                                value_required=False,
                                                display_on_report=True)
self.assertTrue(success)
def test_ex_modify_tag_key_NOCHANGE(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOCHANGE'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_modify_tag_key(tag_key)
def test_ex_remove_tag_key(self):
tag_key = self.driver.ex_list_tag_keys()[0]
success = self.driver.ex_remove_tag_key(tag_key)
self.assertTrue(success)
def test_ex_remove_tag_key_NOEXIST(self):
tag_key = self.driver.ex_list_tag_keys()[0]
DimensionDataMockHttp.type = 'NOEXIST'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_remove_tag_key(tag_key)
def test_ex_apply_tag_to_asset(self):
node = self.driver.list_nodes()[0]
success = self.driver.ex_apply_tag_to_asset(node, 'TagKeyName', 'FakeValue')
self.assertTrue(success)
def test_ex_apply_tag_to_asset_NOVALUE(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOVALUE'
success = self.driver.ex_apply_tag_to_asset(node, 'TagKeyName')
self.assertTrue(success)
def test_ex_apply_tag_to_asset_NOTAGKEY(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOTAGKEY'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_apply_tag_to_asset(node, 'TagKeyNam')
def test_ex_apply_tag_to_asset_BADASSETTYPE(self):
network = self.driver.list_networks()[0]
DimensionDataMockHttp.type = 'NOTAGKEY'
with self.assertRaises(TypeError):
self.driver.ex_apply_tag_to_asset(network, 'TagKeyNam')
def test_ex_remove_tag_from_asset(self):
node = self.driver.list_nodes()[0]
success = self.driver.ex_remove_tag_from_asset(node, 'TagKeyName')
self.assertTrue(success)
def test_ex_remove_tag_from_asset_NOTAG(self):
node = self.driver.list_nodes()[0]
DimensionDataMockHttp.type = 'NOTAG'
with self.assertRaises(DimensionDataAPIException):
self.driver.ex_remove_tag_from_asset(node, 'TagKeyNam')
def test_ex_list_tags(self):
tags = self.driver.ex_list_tags()
self.assertTrue(isinstance(tags, list))
self.assertTrue(isinstance(tags[0], DimensionDataTag))
        self.assertEqual(len(tags), 3)
def test_ex_list_tags_ALLPARAMS(self):
self.driver.connection._get_orgId()
DimensionDataMockHttp.type = 'ALLPARAMS'
tags = self.driver.ex_list_tags(asset_id='fake_asset_id', asset_type='fake_asset_type',
location='fake_location', tag_key_name='fake_tag_key_name',
tag_key_id='fake_tag_key_id', value='fake_value',
value_required=False, display_on_report=False)
self.assertTrue(isinstance(tags, list))
self.assertTrue(isinstance(tags[0], DimensionDataTag))
        self.assertEqual(len(tags), 3)
def test_priv_location_to_location_id(self):
location = self.driver.ex_get_location_by_id('NA9')
self.assertEqual(
self.driver._location_to_location_id(location),
'NA9'
)
def test_priv_location_to_location_id_STR(self):
self.assertEqual(
self.driver._location_to_location_id('NA9'),
'NA9'
)
def test_priv_location_to_location_id_TYPEERROR(self):
with self.assertRaises(TypeError):
self.driver._location_to_location_id([1, 2, 3])
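    # _image_needs_auth is expected to return True for base OS images and for
    # Windows customer images (an admin password must be supplied at deploy
    # time), and False for Linux customer images.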
def test_priv_image_needs_auth_os_img(self):
image = self.driver.list_images()[1]
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_os_img_STR(self):
image = self.driver.list_images()[1].id
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_windows(self):
image = self.driver.ex_list_customer_images()[1]
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_windows_STR(self):
image = self.driver.ex_list_customer_images()[1].id
self.assertTrue(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_linux(self):
image = self.driver.ex_list_customer_images()[0]
        self.assertFalse(self.driver._image_needs_auth(image))
def test_priv_image_needs_auth_cust_img_linux_STR(self):
image = self.driver.ex_list_customer_images()[0].id
        self.assertFalse(self.driver._image_needs_auth(image))
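    # The report calls below return the CSV payload parsed into a list of
    # rows, each row being a list of column values.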
def test_summary_usage_report(self):
        report = self.driver.ex_summary_usage_report('2016-06-01',
                                                     '2016-06-30')
        self.assertEqual(len(report), 13)
        self.assertEqual(len(report[0]), 6)
def test_detailed_usage_report(self):
        report = self.driver.ex_detailed_usage_report('2016-06-01',
                                                      '2016-06-30')
        self.assertEqual(len(report), 42)
        self.assertEqual(len(report[0]), 4)
def test_audit_log_report(self):
        report = self.driver.ex_audit_log_report('2016-06-01', '2016-06-30')
        self.assertEqual(len(report), 25)
        self.assertEqual(report[2][2], 'OEC_SYSTEM')
def test_ex_list_ip_address_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
ip_list = self.driver.ex_list_ip_address_list(
ex_network_domain=net_domain)
self.assertTrue(isinstance(ip_list, list))
self.assertEqual(len(ip_list), 4)
self.assertTrue(isinstance(ip_list[0].name, str))
self.assertTrue(isinstance(ip_list[0].description, str))
self.assertTrue(isinstance(ip_list[0].ip_version, str))
self.assertTrue(isinstance(ip_list[0].state, str))
self.assertTrue(isinstance(ip_list[0].create_time, str))
self.assertTrue(isinstance(ip_list[0].child_ip_address_lists, list))
self.assertEqual(len(ip_list[1].child_ip_address_lists), 1)
self.assertTrue(isinstance(ip_list[1].child_ip_address_lists[0].name,
str))
def test_ex_get_ip_address_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
DimensionDataMockHttp.type = 'FILTERBYNAME'
ip_list = self.driver.ex_get_ip_address_list(
ex_network_domain=net_domain.id,
ex_ip_address_list_name='Test_IP_Address_List_3')
self.assertTrue(isinstance(ip_list, list))
self.assertEqual(len(ip_list), 1)
self.assertTrue(isinstance(ip_list[0].name, str))
self.assertTrue(isinstance(ip_list[0].description, str))
self.assertTrue(isinstance(ip_list[0].ip_version, str))
self.assertTrue(isinstance(ip_list[0].state, str))
self.assertTrue(isinstance(ip_list[0].create_time, str))
ips = ip_list[0].ip_address_collection
self.assertEqual(len(ips), 3)
self.assertTrue(isinstance(ips[0].begin, str))
self.assertTrue(isinstance(ips[0].prefix_size, str))
self.assertTrue(isinstance(ips[2].end, str))
def test_ex_create_ip_address_list_FAIL(self):
net_domain = self.driver.ex_list_network_domains()[0]
with self.assertRaises(TypeError):
self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain.id)
def test_ex_create_ip_address_list(self):
name = "Test_IP_Address_List_3"
description = "Test Description"
ip_version = "IPV4"
child_ip_address_list_id = '0291ef78-4059-4bc1-b433-3f6ad698dc41'
child_ip_address_list = DimensionDataChildIpAddressList(
id=child_ip_address_list_id,
name="test_child_ip_addr_list")
net_domain = self.driver.ex_list_network_domains()[0]
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.100')
ip_address_2 = DimensionDataIpAddress(begin='190.2.2.106',
end='190.2.2.108')
ip_address_3 = DimensionDataIpAddress(begin='190.2.2.0',
prefix_size='24')
ip_address_collection = [ip_address_1, ip_address_2,
ip_address_3]
# Create IP Address List
success = self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain, name=name,
ip_version=ip_version, description=description,
ip_address_collection=ip_address_collection,
child_ip_address_list=child_ip_address_list)
self.assertTrue(success)
def test_ex_create_ip_address_list_STR(self):
name = "Test_IP_Address_List_3"
description = "Test Description"
ip_version = "IPV4"
child_ip_address_list_id = '0291ef78-4059-4bc1-b433-3f6ad698dc41'
net_domain = self.driver.ex_list_network_domains()[0]
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.100')
ip_address_2 = DimensionDataIpAddress(begin='190.2.2.106',
end='190.2.2.108')
ip_address_3 = DimensionDataIpAddress(begin='190.2.2.0',
prefix_size='24')
ip_address_collection = [ip_address_1, ip_address_2,
ip_address_3]
# Create IP Address List
success = self.driver.ex_create_ip_address_list(
ex_network_domain=net_domain.id, name=name,
ip_version=ip_version, description=description,
ip_address_collection=ip_address_collection,
child_ip_address_list=child_ip_address_list_id)
self.assertTrue(success)
def test_ex_edit_ip_address_list(self):
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.111')
ip_address_collection = [ip_address_1]
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
ip_address_list = DimensionDataIpAddressList(
id='1111ef78-4059-4bc1-b433-3f6ad698d111',
name="test ip address list edited",
ip_version="IPv4", description="test",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list,
state="NORMAL",
create_time='2015-09-29T02:49:45'
)
success = self.driver.ex_edit_ip_address_list(
ex_ip_address_list=ip_address_list,
description="test ip address list",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list
)
self.assertTrue(success)
def test_ex_edit_ip_address_list_STR(self):
ip_address_1 = DimensionDataIpAddress(begin='190.2.2.111')
ip_address_collection = [ip_address_1]
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
success = self.driver.ex_edit_ip_address_list(
            ex_ip_address_list='84e34850-595d-436e-a885-7cd37edb24a4',
description="test ip address list",
ip_address_collection=ip_address_collection,
child_ip_address_lists=child_ip_address_list
)
self.assertTrue(success)
def test_ex_delete_ip_address_list(self):
child_ip_address_list = DimensionDataChildIpAddressList(
id='2221ef78-4059-4bc1-b433-3f6ad698dc41',
name="test_child_ip_address_list edited")
ip_address_list = DimensionDataIpAddressList(
id='1111ef78-4059-4bc1-b433-3f6ad698d111',
name="test ip address list edited",
ip_version="IPv4", description="test",
ip_address_collection=None,
child_ip_address_lists=child_ip_address_list,
state="NORMAL",
create_time='2015-09-29T02:49:45'
)
success = self.driver.ex_delete_ip_address_list(
ex_ip_address_list=ip_address_list)
self.assertTrue(success)
def test_ex_delete_ip_address_list_STR(self):
success = self.driver.ex_delete_ip_address_list(
            ex_ip_address_list='1111ef78-4059-4bc1-b433-3f6ad698d111')
self.assertTrue(success)
def test_ex_list_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(
ex_network_domain=net_domain)
self.assertTrue(isinstance(portlist, list))
self.assertEqual(len(portlist), 3)
self.assertTrue(isinstance(portlist[0].name, str))
self.assertTrue(isinstance(portlist[0].description, str))
self.assertTrue(isinstance(portlist[0].state, str))
self.assertTrue(isinstance(portlist[0].port_collection, list))
self.assertTrue(isinstance(portlist[0].port_collection[0].begin, str))
self.assertTrue(isinstance(portlist[0].port_collection[0].end, str))
self.assertTrue(isinstance(portlist[0].child_portlist_list, list))
self.assertTrue(isinstance(portlist[0].child_portlist_list[0].id,
str))
self.assertTrue(isinstance(portlist[0].child_portlist_list[0].name,
str))
self.assertTrue(isinstance(portlist[0].create_time, str))
def test_ex_get_port_list(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist_id = self.driver.ex_list_portlist(
ex_network_domain=net_domain)[0].id
portlist = self.driver.ex_get_portlist(
ex_portlist_id=portlist_id)
self.assertTrue(isinstance(portlist, DimensionDataPortList))
self.assertTrue(isinstance(portlist.name, str))
self.assertTrue(isinstance(portlist.description, str))
self.assertTrue(isinstance(portlist.state, str))
self.assertTrue(isinstance(portlist.port_collection, list))
self.assertTrue(isinstance(portlist.port_collection[0].begin, str))
self.assertTrue(isinstance(portlist.port_collection[0].end, str))
self.assertTrue(isinstance(portlist.child_portlist_list, list))
self.assertTrue(isinstance(portlist.child_portlist_list[0].id,
str))
self.assertTrue(isinstance(portlist.child_portlist_list[0].name,
str))
self.assertTrue(isinstance(portlist.create_time, str))
def test_ex_get_portlist_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(
ex_network_domain=net_domain)[0]
port_list = self.driver.ex_get_portlist(
ex_portlist_id=portlist.id)
self.assertTrue(isinstance(port_list, DimensionDataPortList))
self.assertTrue(isinstance(port_list.name, str))
self.assertTrue(isinstance(port_list.description, str))
self.assertTrue(isinstance(port_list.state, str))
self.assertTrue(isinstance(port_list.port_collection, list))
self.assertTrue(isinstance(port_list.port_collection[0].begin, str))
self.assertTrue(isinstance(port_list.port_collection[0].end, str))
self.assertTrue(isinstance(port_list.child_portlist_list, list))
self.assertTrue(isinstance(port_list.child_portlist_list[0].id,
str))
self.assertTrue(isinstance(port_list.child_portlist_list[0].name,
str))
self.assertTrue(isinstance(port_list.create_time, str))
def test_ex_create_portlist_NOCHILDPORTLIST(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
        port_2 = DimensionDataPort(begin='8899',
                                   end='9023')
port_collection = [port_1, port_2]
        # Create port list
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain, name=name,
description=description,
port_collection=port_collection
)
self.assertTrue(success)
def test_ex_create_portlist(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
        port_2 = DimensionDataPort(begin='8899',
                                   end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports = [child_port_1, child_port_2]
        # Create port list with child port lists
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain, name=name,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports
)
self.assertTrue(success)
def test_ex_create_portlist_STR(self):
name = "Test_Port_List"
description = "Test Description"
net_domain = self.driver.ex_list_network_domains()[0]
port_1 = DimensionDataPort(begin='8080')
        port_2 = DimensionDataPort(begin='8899',
                                   end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports_ids = [child_port_1.id, child_port_2.id]
        # Create port list, passing ids rather than objects
success = self.driver.ex_create_portlist(
ex_network_domain=net_domain.id, name=name,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports_ids
)
self.assertTrue(success)
def test_ex_edit_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
description = "Test Description"
port_1 = DimensionDataPort(begin='8080')
        port_2 = DimensionDataPort(begin='8899',
                                   end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports = [child_port_1.id, child_port_2.id]
        # Edit port list
success = self.driver.ex_edit_portlist(
ex_portlist=portlist,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports
)
self.assertTrue(success)
def test_ex_edit_portlist_STR(self):
portlist_id = "484174a2-ae74-4658-9e56-50fc90e086cf"
description = "Test Description"
port_1 = DimensionDataPort(begin='8080')
        port_2 = DimensionDataPort(begin='8899',
                                   end='9023')
port_collection = [port_1, port_2]
child_port_1 = DimensionDataChildPortList(
id="333174a2-ae74-4658-9e56-50fc90e086cf", name='test port 1')
child_port_2 = DimensionDataChildPortList(
id="311174a2-ae74-4658-9e56-50fc90e04444", name='test port 2')
child_ports_ids = [child_port_1.id, child_port_2.id]
        # Edit port list by id
success = self.driver.ex_edit_portlist(
ex_portlist=portlist_id,
description=description,
port_collection=port_collection,
child_portlist_list=child_ports_ids
)
self.assertTrue(success)
def test_ex_delete_portlist(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
success = self.driver.ex_delete_portlist(
ex_portlist=portlist)
self.assertTrue(success)
def test_ex_delete_portlist_STR(self):
net_domain = self.driver.ex_list_network_domains()[0]
portlist = self.driver.ex_list_portlist(net_domain)[0]
success = self.driver.ex_delete_portlist(
ex_portlist=portlist.id)
self.assertTrue(success)
def test_import_image(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
success = self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id='QA1_N2_VMWARE_1-01',
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
self.assertTrue(success)
def test_import_image_error_too_many_choice(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
with self.assertRaises(ValueError):
self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id='QA1_N2_VMWARE_1-01',
datacenter_id='QA1_N1_VMWARE_1',
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
def test_import_image_error_missing_choice(self):
tag_dictionaries = {'tagkey1_name': 'dev test', 'tagkey2_name': None}
with self.assertRaises(ValueError):
self.driver.import_image(
ovf_package_name='aTestGocToNGoc2_export2.mf',
name='Libcloud NGOCImage_New 2',
description='test',
cluster_id=None,
datacenter_id=None,
is_guest_os_customization='false',
tagkey_name_value_dictionaries=tag_dictionaries)
def test_exchange_nic_vlans(self):
success = self.driver.ex_exchange_nic_vlans(
nic_id_1='a4b4b42b-ccb5-416f-b052-ce7cb7fdff12',
nic_id_2='b39d09b8-ea65-424a-8fa6-c6f5a98afc69')
self.assertTrue(success)
def test_change_nic_network_adapter(self):
success = self.driver.ex_change_nic_network_adapter(
nic_id='0c55c269-20a5-4fec-8054-22a245a48fe4',
network_adapter_name='E1000')
self.assertTrue(success)
def test_ex_create_node_uncustomized_mcp2_using_vlan(self):
# Get VLAN
vlan = self.driver.ex_get_vlan('0e56433f-d808-4669-821d-812769517ff8')
# Create node using vlan instead of private IPv4
node = self.driver.ex_create_node_uncustomized(
name='test_server_05',
image='fake_customer_image',
ex_network_domain='fakenetworkdomain',
ex_is_started=False,
ex_description=None,
ex_cluster_id=None,
ex_cpu_specification=None,
ex_memory_gb=None,
ex_primary_nic_private_ipv4=None,
ex_primary_nic_vlan=vlan,
ex_primary_nic_network_adapter=None,
ex_additional_nics=None,
ex_disks=None,
ex_tagid_value_pairs=None,
ex_tagname_value_pairs=None)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
def test_ex_create_node_uncustomized_mcp2_using_ipv4(self):
node = self.driver.ex_create_node_uncustomized(
name='test_server_05',
image='fake_customer_image',
ex_network_domain='fakenetworkdomain',
ex_is_started=False,
ex_description=None,
ex_cluster_id=None,
ex_cpu_specification=None,
ex_memory_gb=None,
ex_primary_nic_private_ipv4='10.0.0.1',
ex_primary_nic_vlan=None,
ex_primary_nic_network_adapter=None,
ex_additional_nics=None,
ex_disks=None,
ex_tagid_value_pairs=None,
ex_tagname_value_pairs=None)
self.assertEqual(node.id, 'e75ead52-692f-4314-8725-c8a4f4d13a87')
class InvalidRequestError(Exception):
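    """
    Raised by the mock HTTP handlers below when a request body's root
    element does not match the CaaS operation they expect.
    """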
def __init__(self, tag):
super(InvalidRequestError, self).__init__("Invalid Request - %s" % tag)
class DimensionDataMockHttp(MockHttp):
fixtures = ComputeFileFixtures('dimensiondata')
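    # MockHttp derives a handler name from each request path by replacing
    # '/', '.' and '-' with underscores; when DimensionDataMockHttp.type is
    # set, that value is appended as a further suffix. For example, with
    # type = 'PAGINATED' a GET of /oec/0.9/myaccount is dispatched to
    # _oec_0_9_myaccount_PAGINATED. Tests select error, pagination and
    # filter scenarios by setting the type before issuing a call.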
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_report_usage(self, method, url, body, headers):
body = self.fixtures.load(
'summary_usage_report.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_report_usageDetailed(self, method, url, body, headers):
body = self.fixtures.load(
'detailed_usage_report.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_auditlog(self, method, url, body, headers):
body = self.fixtures.load(
'audit_log.csv'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_PAGINATED(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_ALLFILTERS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_image(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_image.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_base_imageWithDiskSpeed(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_base_imageWithDiskSpeed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_pendingDeploy.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11(self, method, url, body, headers):
body = None
action = url.split('?')[-1]
if action == 'restart':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart.xml')
elif action == 'shutdown':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown.xml')
elif action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete.xml')
elif action == 'start':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start.xml')
elif action == 'poweroff':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
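    # Handlers with an _INPROGRESS suffix return HTTP 400 to simulate the API
    # rejecting an operation while a previous one is still in progress.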
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_INPROGRESS(self, method, url, body, headers):
body = None
action = url.split('?')[-1]
if action == 'restart':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_restart_INPROGRESS.xml')
elif action == 'shutdown':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_shutdown_INPROGRESS.xml')
elif action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_delete_INPROGRESS.xml')
elif action == 'start':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_start_INPROGRESS.xml')
elif action == 'poweroff':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_11_poweroff_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
body = self.fixtures.load(
'_oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation(self, method, url, body, headers):
if method is "POST":
request = ET.fromstring(body)
if request.tag != "{http://oec.api.opsource.net/schemas/network}NewNetworkWithLocation":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation_NA9(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkWithLocation.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e(self, method,
url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_4bba37be_506f_11e3_b29c_001517c4643e.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSize.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1_changeSpeed.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1(self, method, url, body, headers):
action = url.split('?')[-1]
if action == 'delete':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_disk_1.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if method == 'POST':
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_e75ead52_692f_4314_8725_c8a4f4d13a87_POST.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_create.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_FAIL_EXISTING(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_create_FAIL.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_07e3621a_a920_4a9a_943c_d8021f27f418(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_delete.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_07e3621a_a920_4a9a_943c_d8021f27f418_FAIL(self, method, url, body, headers):
body = self.fixtures.load(
'oec_0_9_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_antiAffinityRule_delete_FAIL.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server(self, method, url, body, headers):
body = self.fixtures.load(
'server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_deleteServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deleteServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_deleteServer_RESOURCEBUSY.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_rebootServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_rebootServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}rebootServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_rebootServer_RESOURCEBUSY.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server(self, method, url, body, headers):
if url.endswith('datacenterId=NA3'):
body = self.fixtures.load(
'2.4/server_server_NA3.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGESIZE50(self, method, url, body, headers):
if not url.endswith('pageSize=50'):
raise ValueError("pageSize is not set as expected")
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_EMPTY(self, method, url, body, headers):
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGED_THEN_EMPTY(self, method, url, body, headers):
if 'pageNumber=2' in url:
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'2.4/server_server_paginated.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGINATED(self, method, url, body, headers):
if 'pageNumber=2' in url:
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'2.4/server_server_paginated.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_PAGINATEDEMPTY(self, method, url, body, headers):
body = self.fixtures.load(
'server_server_paginated_empty.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
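    # _ALLFILTERS handlers assert that every query parameter generated by the
    # driver carries the fake filter value supplied by the corresponding test.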
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_loc'
elif key == 'networkId':
assert value == 'fake_network'
elif key == 'networkDomainId':
assert value == 'fake_network_domain'
elif key == 'vlanId':
assert value == 'fake_vlan'
elif key == 'ipv6':
assert value == 'fake_ipv6'
elif key == 'privateIpv4':
assert value == 'fake_ipv4'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
elif key == 'started':
assert value == 'True'
elif key == 'deployed':
assert value == 'True'
elif key == 'sourceImageId':
assert value == 'fake_image'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'2.4/server_server.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule(self, method, url, body, headers):
body = self.fixtures.load(
'server_antiAffinityRule_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'id':
assert value == 'FAKE_ID'
elif key == 'state':
assert value == 'FAKE_STATE'
elif key == 'pageSize':
assert value == '250'
elif key == 'networkDomainId':
pass
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'server_antiAffinityRule_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_antiAffinityRule_PAGINATED(self, method, url, body, headers):
if 'pageNumber=2' in url:
body = self.fixtures.load(
'server_antiAffinityRule_list.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
else:
body = self.fixtures.load(
'server_antiAffinityRule_list_PAGINATED.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter(self, method, url, body, headers):
if url.endswith('id=NA9'):
body = self.fixtures.load(
'infrastructure_datacenter_NA9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'infrastructure_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_infrastructure_datacenter_ALLFILTERS(self, method, url, body, headers):
if url.endswith('id=NA9'):
body = self.fixtures.load(
'infrastructure_datacenter_NA9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
body = self.fixtures.load(
'infrastructure_datacenter.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_updateVmwareTools(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}updateVmwareTools":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_updateVmwareTools.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}startServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_startServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_startServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}startServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_startServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_shutdownServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_shutdownServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}shutdownServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_shutdownServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_resetServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}resetServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_resetServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_powerOffServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_powerOffServer_INPROGRESS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}powerOffServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_powerOffServer_INPROGRESS.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_11_INPROGRESS(
self, method, url, body, headers):
body = self.fixtures.load('2.4/server_GetServer.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_location'
elif key == 'type':
assert value == 'fake_plan'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'network_networkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan(self, method, url, body, headers):
body = self.fixtures.load(
'network_vlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'datacenterId':
assert value == 'fake_location'
elif key == 'networkDomainId':
assert value == 'fake_network_domain'
elif key == 'ipv6Address':
assert value == 'fake_ipv6'
elif key == 'privateIpv4Address':
assert value == 'fake_ipv4'
elif key == 'name':
assert value == 'fake_name'
elif key == 'state':
assert value == 'fake_state'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'network_vlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployServer":
raise InvalidRequestError(request.tag)
        # Make sure we either have a network element with a privateIpv4 or a
        # networkId (MCP1), or a networkInfo element whose primaryNic carries
        # a privateIpv4 or a vlanId (MCP2)
network = request.find(fixxpath('network', TYPES_URN))
network_info = request.find(fixxpath('networkInfo', TYPES_URN))
if network is not None:
if network_info is not None:
raise InvalidRequestError("Request has both MCP1 and MCP2 values")
ipv4 = findtext(network, 'privateIpv4', TYPES_URN)
networkId = findtext(network, 'networkId', TYPES_URN)
if ipv4 is None and networkId is None:
raise InvalidRequestError('Invalid request MCP1 requests need privateIpv4 or networkId')
elif network_info is not None:
if network is not None:
raise InvalidRequestError("Request has both MCP1 and MCP2 values")
primary_nic = network_info.find(fixxpath('primaryNic', TYPES_URN))
ipv4 = findtext(primary_nic, 'privateIpv4', TYPES_URN)
vlanId = findtext(primary_nic, 'vlanId', TYPES_URN)
if ipv4 is None and vlanId is None:
raise InvalidRequestError('Invalid request MCP2 requests need privateIpv4 or vlanId')
else:
raise InvalidRequestError('Invalid request, does not have network or network_info in XML')
body = self.fixtures.load(
'server_deployServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_server_e75ead52_692f_4314_8725_c8a4f4d13a87(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/server_server_e75ead52_692f_4314_8725_c8a4f4d13a87.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deployNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be_ALLFILTERS(self, method, url, body, headers):
body = self.fixtures.load(
'network_networkDomain_8cdfd607_f429_4df6_9352_162cfc0891be.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNetworkDomain(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteNetworkDomain":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteNetworkDomain.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deployVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deployVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deployVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_vlan_0e56433f_d808_4669_821d_812769517ff8(self, method, url, body, headers):
body = self.fixtures.load(
'network_vlan_0e56433f_d808_4669_821d_812769517ff8.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_expandVlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}expandVlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_expandVlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_addPublicIpBlock(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}addPublicIpBlock":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_addPublicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock_4487241a_f0ca_11e3_9315_d4bed9b167ba.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8(self, method, url, body, headers):
body = self.fixtures.load(
'network_publicIpBlock_9945dc4a_bdce_11e4_8c14_b8ca3a5d9ef8.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_removePublicIpBlock(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removePublicIpBlock":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_removePublicIpBlock.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule(self, method, url, body, headers):
body = self.fixtures.load(
'network_firewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_createFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c(self, method, url, body, headers):
body = self.fixtures.load(
'network_firewallRule_d0a20f59_77b9_4f28_a63b_e58496b73a6c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_editFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteFirewallRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteFirewallRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteFirewallRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createNatRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createNatRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_createNatRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule(self, method, url, body, headers):
body = self.fixtures.load(
'network_natRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce(self, method, url, body, headers):
body = self.fixtures.load(
'network_natRule_2187a636_7ebb_49a1_a2ff_5d617f496dce.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteNatRule(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteNatRule":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'network_deleteNatRule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addNic(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}addNic":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_addNic.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeNic(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removeNic":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_removeNic.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_disableServerMonitoring(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}disableServerMonitoring":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_disableServerMonitoring.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_enableServerMonitoring(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}enableServerMonitoring":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_enableServerMonitoring.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeServerMonitoringPlan(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}changeServerMonitoringPlan":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_changeServerMonitoringPlan.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_c14b1a46_2428_44c1_9c1a_b20e6418d08c(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage_c14b1a46_2428_44c1_9c1a_b20e6418d08c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_6b4fb0c7_a57b_4f58_b59c_9958f94f971a(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_osImage_6b4fb0c7_a57b_4f58_b59c_9958f94f971a.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_osImage_FAKE_IMAGE_ID(self, method, url, body, headers):
body = self.fixtures.load(
'image_osImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage_5234e5c7_01de_4411_8b6e_baeb8d91cf5d.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c(self, method, url, body, headers):
body = self.fixtures.load(
'2.4/image_customerImage_2ffa36c8_1848_49eb_b4fa_9d908775f68c.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_customerImage_FAKE_IMAGE_ID(self, method, url, body, headers):
body = self.fixtures.load(
'image_customerImage_BAD_REQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_reconfigureServer(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}reconfigureServer":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'server_reconfigureServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_cleanServer(self, method, url, body, headers):
body = self.fixtures.load(
'server_cleanServer.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_addDisk(self, method, url, body, headers):
body = self.fixtures.load(
'server_addDisk.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_removeDisk(self, method, url, body, headers):
body = self.fixtures.load(
'server_removeDisk.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is not None:
raise ValueError("Default description for a tag should be blank")
if value_required is None or value_required != 'true':
raise ValueError("Default valueRequired should be true")
if display_on_report is None or display_on_report != 'true':
raise ValueError("Default displayOnReport should be true")
body = self.fixtures.load(
'tag_createTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey_ALLPARAMS(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}createTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is None:
raise ValueError("Description should have a value")
if value_required is None or value_required != 'false':
raise ValueError("valueRequired should be false")
if display_on_report is None or display_on_report != 'false':
raise ValueError("displayOnReport should be false")
body = self.fixtures.load(
'tag_createTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_createTagKey_BADREQUEST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_createTagKey_BADREQUEST.xml')
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_SINGLE(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_list_SINGLE.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_ALLFILTERS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
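        # e.g. a matching request URL looks like (sketch, reconstructed from
        # the assertions below):
        #   .../tag/tagKey?id=fake_id&name=fake_name&valueRequired=false
        #      &displayOnReport=false&pageSize=250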
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'id':
assert value == 'fake_id'
elif key == 'name':
assert value == 'fake_name'
elif key == 'valueRequired':
assert value == 'false'
elif key == 'displayOnReport':
assert value == 'false'
elif key == 'pageSize':
assert value == '250'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'tag_tagKey_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_d047c609_93d7_4bc5_8fc9_732c85840075(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_5ab77f5f_5aa9_426f_8459_4eab34e03d54.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tagKey_d047c609_93d7_4bc5_8fc9_732c85840075_NOEXIST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tagKey_5ab77f5f_5aa9_426f_8459_4eab34e03d54_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NAME(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is None:
raise ValueError("Name must have a value in the request")
if description is not None:
raise ValueError("Description should be empty")
if value_required is not None:
raise ValueError("valueRequired should be empty")
if display_on_report is not None:
raise ValueError("displayOnReport should be empty")
body = self.fixtures.load(
'tag_editTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NOTNAME(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}editTagKey":
raise InvalidRequestError(request.tag)
name = findtext(request, 'name', TYPES_URN)
description = findtext(request, 'description', TYPES_URN)
value_required = findtext(request, 'valueRequired', TYPES_URN)
display_on_report = findtext(request, 'displayOnReport', TYPES_URN)
if name is not None:
raise ValueError("Name should be empty")
if description is None:
raise ValueError("Description should not be empty")
if value_required is None:
raise ValueError("valueRequired should not be empty")
if display_on_report is None:
raise ValueError("displayOnReport should not be empty")
body = self.fixtures.load(
'tag_editTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_editTagKey_NOCHANGE(self, method, url, body, headers):
body = self.fixtures.load(
'tag_editTagKey_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_deleteTagKey(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}deleteTagKey":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'tag_deleteTagKey.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_deleteTagKey_NOEXIST(self, method, url, body, headers):
body = self.fixtures.load(
'tag_deleteTagKey_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}applyTags":
raise InvalidRequestError(request.tag)
asset_type = findtext(request, 'assetType', TYPES_URN)
asset_id = findtext(request, 'assetId', TYPES_URN)
tag = request.find(fixxpath('tag', TYPES_URN))
tag_key_name = findtext(tag, 'tagKeyName', TYPES_URN)
value = findtext(tag, 'value', TYPES_URN)
if asset_type is None:
raise ValueError("assetType should not be empty")
if asset_id is None:
raise ValueError("assetId should not be empty")
if tag_key_name is None:
raise ValueError("tagKeyName should not be empty")
if value is None:
raise ValueError("value should not be empty")
body = self.fixtures.load(
'tag_applyTags.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags_NOVALUE(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}applyTags":
raise InvalidRequestError(request.tag)
asset_type = findtext(request, 'assetType', TYPES_URN)
asset_id = findtext(request, 'assetId', TYPES_URN)
tag = request.find(fixxpath('tag', TYPES_URN))
tag_key_name = findtext(tag, 'tagKeyName', TYPES_URN)
value = findtext(tag, 'value', TYPES_URN)
if asset_type is None:
raise ValueError("assetType should not be empty")
if asset_id is None:
raise ValueError("assetId should not be empty")
if tag_key_name is None:
raise ValueError("tagKeyName should not be empty")
if value is not None:
raise ValueError("value should be empty")
body = self.fixtures.load(
'tag_applyTags.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_applyTags_NOTAGKEY(self, method, url, body, headers):
body = self.fixtures.load(
'tag_applyTags_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_removeTags(self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}removeTags":
raise InvalidRequestError(request.tag)
body = self.fixtures.load(
'tag_removeTag.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_removeTags_NOTAG(self, method, url, body, headers):
body = self.fixtures.load(
'tag_removeTag_BADREQUEST.xml'
)
return (httplib.BAD_REQUEST, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tag(self, method, url, body, headers):
body = self.fixtures.load(
'tag_tag_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_tag_tag_ALLPARAMS(self, method, url, body, headers):
(_, params) = url.split('?')
parameters = params.split('&')
for parameter in parameters:
(key, value) = parameter.split('=')
if key == 'assetId':
assert value == 'fake_asset_id'
elif key == 'assetType':
assert value == 'fake_asset_type'
elif key == 'valueRequired':
assert value == 'false'
elif key == 'displayOnReport':
assert value == 'false'
elif key == 'pageSize':
assert value == '250'
elif key == 'datacenterId':
assert value == 'fake_location'
elif key == 'value':
assert value == 'fake_value'
elif key == 'tagKeyName':
assert value == 'fake_tag_key_name'
elif key == 'tagKeyId':
assert value == 'fake_tag_key_id'
else:
raise ValueError("Could not find in url parameters {0}:{1}".format(key, value))
body = self.fixtures.load(
'tag_tag_list.xml'
)
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_ipAddressList(
self, method, url, body, headers):
body = self.fixtures.load('ip_address_lists.xml')
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_ipAddressList_FILTERBYNAME(
self, method, url, body, headers):
body = self.fixtures.load('ip_address_lists_FILTERBYNAME.xml')
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"createIpAddressList":
raise InvalidRequestError(request.tag)
net_domain = findtext(request, 'networkDomainId', TYPES_URN)
if net_domain is None:
raise ValueError("Network Domain should not be empty")
name = findtext(request, 'name', TYPES_URN)
if name is None:
raise ValueError("Name should not be empty")
ip_version = findtext(request, 'ipVersion', TYPES_URN)
if ip_version is None:
raise ValueError("IP Version should not be empty")
ip_address_col_required = findall(request, 'ipAddress', TYPES_URN)
child_ip_address_required = findall(request, 'childIpAddressListId',
TYPES_URN)
if 0 == len(ip_address_col_required) and \
0 == len(child_ip_address_required):
raise ValueError("At least one ipAddress element or "
"one childIpAddressListId element must be "
"provided.")
        if ip_address_col_required and \
                ip_address_col_required[0].get('begin') is None:
raise ValueError("IP Address should not be empty")
body = self.fixtures.load(
'ip_address_list_create.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"editIpAddressList":
raise InvalidRequestError(request.tag)
ip_address_list = request.get('id')
if ip_address_list is None:
raise ValueError("IpAddressList ID should not be empty")
name = findtext(request, 'name', TYPES_URN)
if name is not None:
raise ValueError("Name should not exists in request")
ip_version = findtext(request, 'ipVersion', TYPES_URN)
if ip_version is not None:
raise ValueError("IP Version should not exists in request")
ip_address_col_required = findall(request, 'ipAddress', TYPES_URN)
child_ip_address_required = findall(request, 'childIpAddressListId',
TYPES_URN)
if 0 == len(ip_address_col_required) and \
0 == len(child_ip_address_required):
raise ValueError("At least one ipAddress element or "
"one childIpAddressListId element must be "
"provided.")
        if ip_address_col_required and \
                ip_address_col_required[0].get('begin') is None:
raise ValueError("IP Address should not be empty")
body = self.fixtures.load(
'ip_address_list_edit.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deleteIpAddressList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"deleteIpAddressList":
raise InvalidRequestError(request.tag)
ip_address_list = request.get('id')
if ip_address_list is None:
raise ValueError("IpAddressList ID should not be empty")
body = self.fixtures.load(
'ip_address_list_delete.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_portList(
self, method, url, body, headers):
body = self.fixtures.load(
'port_list_lists.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_portList_c8c92ea3_2da8_4d51_8153_f39bec794d69(
self, method, url, body, headers):
body = self.fixtures.load(
'port_list_get.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_createPortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"createPortList":
raise InvalidRequestError(request.tag)
net_domain = findtext(request, 'networkDomainId', TYPES_URN)
if net_domain is None:
raise ValueError("Network Domain should not be empty")
ports_required = findall(request, 'port', TYPES_URN)
child_port_list_required = findall(request, 'childPortListId',
TYPES_URN)
if 0 == len(ports_required) and \
0 == len(child_port_list_required):
raise ValueError("At least one port element or one "
"childPortListId element must be provided")
        if ports_required and \
                ports_required[0].get('begin') is None:
raise ValueError("PORT begin value should not be empty")
body = self.fixtures.load(
'port_list_create.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_editPortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"editPortList":
raise InvalidRequestError(request.tag)
ports_required = findall(request, 'port', TYPES_URN)
child_port_list_required = findall(request, 'childPortListId',
TYPES_URN)
if 0 == len(ports_required) and \
0 == len(child_port_list_required):
raise ValueError("At least one port element or one "
"childPortListId element must be provided")
        if ports_required and \
                ports_required[0].get('begin') is None:
raise ValueError("PORT begin value should not be empty")
body = self.fixtures.load(
'port_list_edit.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_network_deletePortList(
self, method, url, body, headers):
request = ET.fromstring(body)
if request.tag != "{urn:didata.com:api:cloud:types}" \
"deletePortList":
raise InvalidRequestError(request.tag)
port_list = request.get('id')
if port_list is None:
raise ValueError("Port List ID should not be empty")
body = self.fixtures.load(
'ip_address_list_delete.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_cloneServer(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/server_clone_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_image_importImage(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/import_image_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_exchangeNicVlans(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/exchange_nic_vlans_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_changeNetworkAdapter(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/change_nic_networkadapter_response.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
def _caas_2_4_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_server_deployUncustomizedServer(
self, method, url, body, headers):
body = self.fixtures.load(
'2.4/deploy_customised_server.xml'
)
return httplib.OK, body, {}, httplib.responses[httplib.OK]
if __name__ == '__main__':
sys.exit(unittest.main())
| illfelder/libcloud | libcloud/test/compute/test_dimensiondata_v2_4.py | Python | apache-2.0 | 163,626 |
"""
Tests for the public API exposed by L{eliot}.
"""
from __future__ import unicode_literals
from unittest import TestCase
from .._output import Logger
import eliot
class PublicAPITests(TestCase):
"""
Tests for the public API.
"""
def test_addDestination(self):
"""
L{eliot.addDestination} adds destinations to the L{Destinations}
attached to L{Logger}.
"""
self.assertEqual(eliot.addDestination, Logger._destinations.add)
def test_removeDestination(self):
"""
        L{eliot.removeDestination} removes destinations from the L{Destinations}
attached to L{Logger}.
"""
self.assertEqual(eliot.removeDestination, Logger._destinations.remove)
def test_addGlobalFields(self):
"""
L{eliot.addGlobalFields} calls the corresponding method on the
L{Destinations} attached to L{Logger}.
"""
self.assertEqual(eliot.addGlobalFields,
Logger._destinations.addGlobalFields)
class PEP8Tests(TestCase):
"""
    Tests for the PEP 8 variant of the public API.
"""
def test_add_destination(self):
"""
        L{eliot.addDestination} is the same as L{eliot.add_destination}.
"""
self.assertIs(eliot.add_destination, eliot.addDestination)
def test_remove_destination(self):
"""
        L{eliot.removeDestination} is the same as L{eliot.remove_destination}.
"""
self.assertIs(eliot.remove_destination, eliot.removeDestination)
def test_add_global_fields(self):
"""
L{eliot.add_global_fields} is the same as L{eliot.addGlobalFields}.
"""
self.assertIs(eliot.add_global_fields, eliot.addGlobalFields)
def test_write_traceback(self):
"""
L{eliot.writeTraceback} is the same as L{eliot.write_traceback}.
"""
self.assertIs(eliot.write_traceback, eliot.writeTraceback)
def test_write_failure(self):
"""
L{eliot.writeFailure} is the same as L{eliot.write_failure}.
"""
self.assertIs(eliot.write_failure, eliot.writeFailure)
def test_start_task(self):
"""
L{eliot.startTask} is the same as L{eliot.start_task}.
"""
self.assertIs(eliot.start_task, eliot.startTask)
def test_start_action(self):
"""
L{eliot.startAction} is the same as L{eliot.start_action}.
"""
self.assertIs(eliot.start_action, eliot.startAction)
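# A minimal usage sketch of the aliased API (hypothetical destination callable;
# destinations receive message dicts):
#   dest = lambda message: None
#   eliot.add_destination(dest)      # same object as eliot.addDestination
#   eliot.remove_destination(dest)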
| iffy/eliot | eliot/tests/test_api.py | Python | apache-2.0 | 2,516 |
__author__ = "mfreer"
__date__ = "2011-05-27 14:27"
__version__ = "1.0"
__all__ = ["TempVirtualCnrm"]
import egads.core.egads_core as egads_core
import egads.core.metadata as egads_metadata
class TempVirtualCnrm(egads_core.EgadsAlgorithm):
"""
FILE temp_virtual_cnrm.py
VERSION 1.0
CATEGORY Thermodynamics
PURPOSE Calculate virtual temperature
DESCRIPTION Calculates virtual temperature given static pressure and mixing ratio.
INPUT T_s vector K or C static temperature
r vector g/kg water vapor mixing ratio
OUTPUT T_v vector K or C virtual temperature
SOURCE CNRM/GMEI/TRAMM
REFERENCES Triplet-Roche, page 56.
"""
def __init__(self, return_Egads=True):
egads_core.EgadsAlgorithm.__init__(self, return_Egads)
self.output_metadata = egads_metadata.VariableMetadata({'units':'K',
'long_name':'virtual temperature',
'standard_name':'virtual_temperature',
'Category':['Thermodynamics','Atmos State']})
self.metadata = egads_metadata.AlgorithmMetadata({'Inputs':['T_s', 'r'],
'InputUnits':['K','g/kg'],
'InputTypes':['vector','vector'],
'InputDescription':['Static temperature','Water vapor mixing ratio'],
'Outputs':['T_v'],
'OutputUnits':['K'],
'OutputTypes':['vector'],
'OutputDescription':['Virtual temperature'],
'Purpose':'Calculate virtual temperature',
'Description':'Calculates virtual temperature given static pressure and mixing ratio',
'Category':'Thermodynamics',
'Source':'CNRM/GMEI/TRAMM',
'References':'Triplet-Roche, page 56',
'Processor':self.name,
'ProcessorDate':__date__,
'ProcessorVersion':__version__,
'DateProcessed':self.now()},
self.output_metadata)
def run(self, T_s, r):
return egads_core.EgadsAlgorithm.run(self, T_s, r)
def _algorithm(self, T_s, r):
        RvRa = 1.608  # ratio of specific gas constants R_v/R_a for water vapor and dry air
T_v = T_s * (1 + RvRa * r) / (1 + r)
return T_v
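# A usage sketch with hypothetical values (assumes egads's EgadsData wrapper,
# which carries the units declared in the algorithm metadata above):
#   import egads
#   T_s = egads.EgadsData(value=[290.0], units='K')
#   r = egads.EgadsData(value=[8.0], units='g/kg')
#   T_v = TempVirtualCnrm().run(T_s, r)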
| eufarn7sp/egads-eufar | egads/algorithms/thermodynamics/temp_virtual_cnrm.py | Python | bsd-3-clause | 3,161 |
from django.db import models
from django.template.defaultfilters import slugify
from django.contrib.auth.models import User
class Tapas(models.Model):
nombre_tapa = models.CharField(max_length=128, unique=True)
votos = models.IntegerField(default=0)
slug = models.SlugField()
def save(self, *args, **kwargs):
        # To stop the slug changing every time the name changes, set it only on
        # first save instead:
        # if self.id is None:
        #     self.slug = slugify(self.nombre_tapa)
self.slug = slugify(self.nombre_tapa)
super(Tapas, self).save(*args, **kwargs)
    def __unicode__(self):  # __unicode__ on Python 2; use __str__ on Python 3
return self.nombre_tapa
class Bares(models.Model):
tapa = models.ForeignKey(Tapas)
nombre_bar = models.CharField(max_length=128)
direccion = models.CharField(max_length=128)
n_visitas = models.IntegerField(default=0)
    def __unicode__(self):  # __unicode__ on Python 2; use __str__ on Python 3
return self.nombre_bar
class UserProfile(models.Model):
# This line is required. Links UserProfile to a User model instance.
user = models.OneToOneField(User)
# The additional attributes we wish to include.
website = models.URLField(blank=True)
picture = models.ImageField(upload_to='profile_images', blank=True)
# Override the __unicode__() method to return out something meaningful!
def __unicode__(self):
return self.user.username
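# Example (sketch): the overridden save() fills the slug from the name --
#   t = Tapas(nombre_tapa='Patatas Bravas')
#   t.save()   # t.slug == 'patatas-bravas'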
| sn1k/Vinos | rango/models.py | Python | mit | 1,441 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-06 02:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='GatherHistory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
                ('import_status', models.IntegerField(choices=[(0, 'Completed'), (1, 'Failed')], default=0)),
('message', models.CharField(max_length=500, null=True)),
('no_people', models.IntegerField(blank=True, null=True)),
('no_groups', models.IntegerField(blank=True, null=True)),
('no_identifiers', models.IntegerField(blank=True, null=True)),
('next_scheduled', models.DateTimeField(blank=True, null=True)),
],
options={
'ordering': ['-created'],
},
),
]
| alzeih/ava | ava_core/gather/gather_abstract/migrations/0001_initial.py | Python | gpl-3.0 | 1,208 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#------------------------------------------------------------------------------
# file: $Id$
# auth: metagriffin <[email protected]>
# date: 2013/12/14
# copy: (C) Copyright 2013-EOT metagriffin -- see LICENSE.txt
#------------------------------------------------------------------------------
# This software is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#------------------------------------------------------------------------------
import os, sys, setuptools
from setuptools import setup, find_packages
# require python 2.7+
if sys.hexversion < 0x02070000:
raise RuntimeError('This package requires python 2.7 or better')
heredir = os.path.abspath(os.path.dirname(__file__))
def read(*parts, **kw):
  try: return open(os.path.join(heredir, *parts)).read()
  except IOError: return kw.get('default', '')
test_dependencies = [
'nose >= 1.3.0',
'coverage >= 3.5.3',
]
dependencies = [
'distribute >= 0.6.24',
'pxml >= 0.2.10',
'blessings >= 1.5',
'six >= 1.4.1',
'aadict >= 0.2.1',
'morph >= 0.1.1',
'asset >= 0.6',
'requests >= 2.1.0',
'PyYAML >= 3.10',
'Pygments >= 1.6',
]
entrypoints = {
'console_scripts': [
'proxylog = proxylog.cli:main',
],
}
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Operating System :: OS Independent',
'Natural Language :: English',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
]
setup(
name = 'proxylog',
version = read('VERSION.txt', default='0.0.1').strip(),
description = 'A simple, logging, colorizing, pretty-formatting, HTTP proxy.',
long_description = read('README.rst'),
classifiers = classifiers,
author = 'metagriffin',
author_email = '[email protected]',
url = 'http://github.com/metagriffin/proxylog',
keywords = 'http proxy logging xml colorize prettify',
packages = find_packages(),
platforms = ['any'],
include_package_data = True,
zip_safe = True,
install_requires = dependencies,
tests_require = test_dependencies,
test_suite = 'proxylog',
entry_points = entrypoints,
license = 'GPLv3+',
)
#------------------------------------------------------------------------------
# end of $Id$
#------------------------------------------------------------------------------
| metagriffin/proxylog | setup.py | Python | gpl-3.0 | 3,313 |
#!/usr/bin/env python
from wagtail import __version__
from wagtail.utils.setup import assets, check_bdist_egg, sdist
try:
from setuptools import find_packages, setup
except ImportError:
from distutils.core import setup
# Hack to prevent "TypeError: 'NoneType' object is not callable" error
# in multiprocessing/util.py _exit_function when setup.py exits
# (see http://www.eby-sarna.com/pipermail/peak/2010-May/003357.html)
try:
import multiprocessing # noqa
except ImportError:
pass
install_requires = [
"Django>=2.2,<3.2",
"django-modelcluster>=5.1,<6.0",
"django-taggit>=1.0,<2.0",
"django-treebeard>=4.2.0,<5.0",
"djangorestframework>=3.11.1,<4.0",
"django-filter>=2.2,<3.0",
"draftjs_exporter>=2.1.5,<3.0",
"Pillow>=4.0.0,<9.0.0",
"beautifulsoup4>=4.8,<4.9",
"html5lib>=0.999,<2",
"Willow>=1.4,<1.5",
"requests>=2.11.1,<3.0",
"l18n>=2018.5",
"xlsxwriter>=1.2.8,<2.0",
"tablib[xls,xlsx]>=0.14.0",
"anyascii>=0.1.5",
]
# Testing dependencies
testing_extras = [
# Required for running the tests
'python-dateutil>=2.2',
'pytz>=2014.7',
'elasticsearch>=5.0,<6.0',
'Jinja2>=2.8,<3.0',
'boto3>=1.16,<1.17',
'freezegun>=0.3.8',
'openpyxl>=2.6.4',
'Unidecode>=0.04.14,<2.0',
# For coverage and PEP8 linting
'coverage>=3.7.0',
'flake8>=3.6.0',
'isort==5.6.4', # leave this pinned - it tends to change rules between patch releases
'flake8-blind-except==0.1.1',
'flake8-print==2.0.2',
'doc8==0.8.1',
# For templates linting
'jinjalint>=0.5',
# Pipenv hack to fix broken dependency causing CircleCI failures
'docutils==0.15',
# django-taggit 1.3.0 made changes to verbose_name which affect migrations;
# the test suite migrations correspond to >=1.3.0
'django-taggit>=1.3.0,<2.0',
]
# Documentation dependencies
documentation_extras = [
'pyenchant>=3.1.1,<4',
'sphinxcontrib-spelling>=5.4.0,<6',
'Sphinx>=1.5.2',
'sphinx-autobuild>=0.6.0',
'sphinx_rtd_theme>=0.1.9',
]
setup(
name='wagtail',
version=__version__,
description='A Django content management system.',
author='Wagtail core team + contributors',
author_email='[email protected]', # For support queries, please see https://docs.wagtail.io/en/stable/support.html
url='https://wagtail.io/',
packages=find_packages(),
include_package_data=True,
license='BSD',
long_description="Wagtail is an open source content management \
system built on Django, with a strong community and commercial support. \
It’s focused on user experience, and offers precise control for \
designers and developers.\n\n\
For more details, see https://wagtail.io, https://docs.wagtail.io and \
https://github.com/wagtail/wagtail/.",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Framework :: Django',
'Framework :: Django :: 2.2',
'Framework :: Django :: 3.0',
'Framework :: Django :: 3.1',
'Framework :: Wagtail',
'Topic :: Internet :: WWW/HTTP :: Site Management',
],
python_requires='>=3.6',
install_requires=install_requires,
extras_require={
'testing': testing_extras,
'docs': documentation_extras
},
entry_points="""
[console_scripts]
wagtail=wagtail.bin.wagtail:main
""",
zip_safe=False,
cmdclass={
'sdist': sdist,
'bdist_egg': check_bdist_egg,
'assets': assets,
},
)
| FlipperPA/wagtail | setup.py | Python | bsd-3-clause | 3,993 |
import datetime
import math
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.apps import apps
from django.db import connection
from radio.models import *
from django.db.utils import IntegrityError
table_list = (
#'auth_group',
#'auth_group_permissions',
#'auth_permission',
'radio_plan',
'auth_user',
#'auth_user_groups',
#'auth_user_user_permissions',
'account_emailaddress',
'account_emailconfirmation',
#'django_admin_log',
#'django_content_type',
#'django_migrations',
#'django_session',
'sites_site',
'radio_repeatersite',
'radio_service',
'radio_system',
'radio_agency',
'radio_source',
#'radio_profile',
'radio_siteoption',
'radio_talkgroup',
'radio_talkgroupaccess',
'radio_scanlist',
'radio_scanlist_talkgroups',
'radio_menuscanlist',
'radio_menutalkgrouplist',
#'radio_tranmissionunit',
#'radio_transmission',
'radio_unit',
'radio_webhtml',
'radio_siteoption',
'radio_stripeplanmatrix',
'socialaccount_socialaccount',
'socialaccount_socialapp',
'socialaccount_socialapp_sites',
'socialaccount_socialtoken',
)
class Command(BaseCommand):
help = 'Move all data from the old db into a new one'
def add_arguments(self, parser):
parser.add_argument('-f', '--fix-seq', action='store_true', dest='fix-seq', default=False)
parser.add_argument('-F', '--fix-all-seq', action='store_true', dest='fix-all-seq', default=False)
def handle(self, *args, **options):
move_all_db_data(options)
def move_all_db_data(opt):
global table_list
db_engine = connection.vendor
#table_list = ()
move_data = True
if opt['fix-seq']:
table_list = ('radio_transmission',)
move_data = False
if opt['fix-all-seq']:
table_list = table_list + ('radio_transmission',)
move_data = False
    if not move_data and db_engine != 'postgresql':
        raise CommandError('Fixing sequences is only supported on a PostgreSQL database')
for table_info in table_list:
tb = table_info.split('_',1)
app = tb[0]
table = tb[1]
if move_data:
print("Moving data from app {} table {} into new db".format(app,table))
else:
print("Fixing postgress for app {} table {} into new db".format(app,table))
try:
tb_model = apps.get_model(app_label=app, model_name=table)
except:
print("Model {} does not exist, skipping..".format(table))
continue
if move_data:
tb_data = tb_model.objects.using('old').all()
for rec in tb_data:
rec.save(using='default')
if db_engine == 'postgresql':
with connection.cursor() as cursor:
update_seq = "SELECT setval(pg_get_serial_sequence('{}', 'id'), coalesce(max(id),0) + 1, false) FROM {};".format(tb_model._meta.db_table, tb_model._meta.db_table)
#print("Run",update_seq)
cursor.execute(update_seq)
if move_data:
        # Now copy transmissions over in batches
        amount = 5000
        total = Transmission.objects.using('old').count()
        print("Total transmissions", total)
end_rec = total - 1
start_rec = end_rec - amount
start_time = None
end_time = None
if start_rec < 0:
start_rec = 0
while end_rec > 0:
run_time = "UNK"
if end_time:
#print("End {} Start {} diff {} * ( end {} / total {})".format(end_time, start_time, (end_time - start_time).seconds, end_rec, amount))
d = divmod((end_time - start_time).seconds * (end_rec / amount ),86400) # days
h = divmod(d[1],3600) # hours
m = divmod(h[1],60) # minutes
s = m[1] # seconds
run_time = '{}:{}:{}:{}'.format(math.floor(d[0]),math.floor(h[0]),math.floor(m[0]),math.floor(s))
print('Importing Trans {} to {} Est Run time {}'.format(start_rec,end_rec,run_time))
start_time = datetime.datetime.now()
trans = Transmission.objects.using('old').all()[start_rec:end_rec]
for rec in trans:
rec.save(using='default')
end_time = datetime.datetime.now()
end_rec = end_rec - amount
start_rec = start_rec - amount
if start_rec < 0:
start_rec = 0
with connection.cursor() as cursor:
update_seq = "SELECT setval(pg_get_serial_sequence('{}', 'id'), coalesce(max(id),0) + 1, false) FROM {};".format(Transmission._meta.db_table, Transmission._meta.db_table)
cursor.execute(update_seq)
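# Invocation sketch (assumes a second DATABASES alias named 'old' pointing at
# the source database, as used throughout above):
#   python manage.py move_db             # copy every table, then fix sequences
#   python manage.py move_db --fix-seq   # only repair the transmission sequence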
| ScanOC/trunk-player | radio/management/commands/move_db.py | Python | mit | 4,782 |
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sqlalchemy as sa
from ibis.sql.alchemy import unary, varargs, fixed_arity
import ibis.sql.alchemy as alch
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
import ibis.expr.types as ir
import ibis.common as com
_operation_registry = alch._operation_registry.copy()
def _cast(t, expr):
# It's not all fun and games with SQLite
op = expr.op()
arg, target_type = op.args
sa_arg = t.translate(arg)
sa_type = t.get_sqla_type(target_type)
# SQLite does not have a physical date/time/timestamp type, so
# unfortunately cast to typestamp must be a no-op, and we have to trust
# that the user's data can actually be correctly parsed by SQLite.
if isinstance(target_type, dt.Timestamp):
if not isinstance(arg, (ir.IntegerValue, ir.StringValue)):
raise com.TranslationError(type(arg))
return sa_arg
if isinstance(arg, ir.CategoryValue) and target_type == 'int32':
return sa_arg
else:
return sa.cast(sa_arg, sa_type)
def _substr(t, expr):
f = sa.func.substr
arg, start, length = expr.op().args
sa_arg = t.translate(arg)
sa_start = t.translate(start)
if length is None:
return f(sa_arg, sa_start + 1)
else:
sa_length = t.translate(length)
return f(sa_arg, sa_start + 1, sa_length)
def _string_right(t, expr):
f = sa.func.substr
arg, length = expr.op().args
sa_arg = t.translate(arg)
sa_length = t.translate(length)
return f(sa_arg, -sa_length, sa_length)
def _string_find(t, expr):
arg, substr, start, _ = expr.op().args
if start is not None:
raise NotImplementedError
sa_arg = t.translate(arg)
sa_substr = t.translate(substr)
f = sa.func.instr
return f(sa_arg, sa_substr) - 1
def _infix_op(infix_sym):
def formatter(t, expr):
op = expr.op()
left, right = op.args
left_arg = t.translate(left)
right_arg = t.translate(right)
return left_arg.op(infix_sym)(right_arg)
return formatter
def _strftime(t, expr):
arg, format = expr.op().args
sa_arg = t.translate(arg)
sa_format = t.translate(format)
return sa.func.strftime(sa_format, sa_arg)
def _strftime_int(fmt):
def translator(t, expr):
arg, = expr.op().args
sa_arg = t.translate(arg)
return sa.cast(sa.func.strftime(fmt, sa_arg), sa.types.INTEGER)
return translator
def _now(t, expr):
return sa.func.datetime('now')
def _millisecond(t, expr):
    arg, = expr.op().args
    sa_arg = t.translate(arg)
    # strftime('%f') yields seconds with a fractional part (e.g. '30.250'),
    # so scale to milliseconds and drop the whole seconds
    fractional_second = sa.func.strftime('%f', sa_arg)
    return (fractional_second * 1000) % 1000
def _identical_to(t, expr):
    # NULL-safe equality: true when both operands are NULL or when they are equal
    left, right = args = expr.op().args
if left.equals(right):
return True
else:
left, right = map(t.translate, args)
return sa.func.coalesce(
(left.is_(None) & right.is_(None)) | (left == right),
False
)
_operation_registry.update({
ops.Cast: _cast,
ops.Substring: _substr,
ops.StrRight: _string_right,
ops.StringFind: _string_find,
ops.StringLength: unary('length'),
ops.Least: varargs(sa.func.min),
ops.Greatest: varargs(sa.func.max),
ops.IfNull: fixed_arity(sa.func.ifnull, 2),
ops.Lowercase: unary('lower'),
ops.Uppercase: unary('upper'),
ops.Strip: unary('trim'),
ops.LStrip: unary('ltrim'),
ops.RStrip: unary('rtrim'),
ops.StringReplace: fixed_arity(sa.func.replace, 3),
ops.StringSQLLike: _infix_op('LIKE'),
ops.RegexSearch: _infix_op('REGEXP'),
ops.Strftime: _strftime,
ops.ExtractYear: _strftime_int('%Y'),
ops.ExtractMonth: _strftime_int('%m'),
ops.ExtractDay: _strftime_int('%d'),
ops.ExtractHour: _strftime_int('%H'),
ops.ExtractMinute: _strftime_int('%M'),
ops.ExtractSecond: _strftime_int('%S'),
ops.ExtractMillisecond: _millisecond,
ops.TimestampNow: _now,
ops.IdenticalTo: _identical_to,
})
def add_operation(op, translation_func):
_operation_registry[op] = translation_func
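# Example (sketch): downstream code can extend the registry via add_operation,
# e.g. mapping ibis's string Reverse onto a hypothetical SQLite reverse() UDF:
#   add_operation(ops.Reverse, unary('reverse'))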
class SQLiteExprTranslator(alch.AlchemyExprTranslator):
_registry = _operation_registry
_rewrites = alch.AlchemyExprTranslator._rewrites.copy()
_type_map = alch.AlchemyExprTranslator._type_map.copy()
_type_map.update({
dt.Double: sa.types.REAL,
dt.Float: sa.types.REAL
})
rewrites = SQLiteExprTranslator.rewrites
compiles = SQLiteExprTranslator.compiles
class SQLiteDialect(alch.AlchemyDialect):
translator = SQLiteExprTranslator
| wesm/ibis | ibis/sql/sqlite/compiler.py | Python | apache-2.0 | 5,142 |
# Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import sys
import vcstools
from wstool.config_yaml import get_path_spec_from_yaml
def checkout_rosinstall(rosinstall_data, verbose=False):
"""
:param rosinstall_data: yaml dict in rosinstall format
    :raises: rosinstall.common.MultiProjectException for invalid yaml
"""
for frag in rosinstall_data:
path_spec = get_path_spec_from_yaml(frag)
if verbose:
print(path_spec.get_scmtype(),
path_spec.get_path(),
path_spec.get_uri(),
path_spec.get_version())
vcs_client = vcstools.get_vcs_client(path_spec.get_scmtype(),
path_spec.get_path())
vcs_client.checkout(path_spec.get_uri(),
path_spec.get_version())
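# Example (sketch): a minimal rosinstall-style fragment, one dict per entry,
# keyed by SCM type (the path/uri/version values here are hypothetical):
#   checkout_rosinstall([
#       {'git': {'local-name': 'src/foo',
#                'uri': 'https://example.com/foo.git',
#                'version': 'master'}},
#   ], verbose=True)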
| tkruse/rosinstall | src/rosinstall/simple_checkout.py | Python | bsd-3-clause | 2,400 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of <%= package.name %>.
# <%= package.url %>
# Licensed under the <%= package.license %> license:
# http://www.opensource.org/licenses/<%= package.license%>-license
# Copyright (c) <%= package.created.year %> <%= package.author.name %> <%= package.author.email %>
from setuptools import setup, find_packages
from <%= package.name %> import __version__
tests_require = [
'mock',
'nose',
'coverage',
'yanc',
'preggy',
'tox',
'ipdb',
'coveralls',
'sphinx',
]
setup(
name='<%= package.name %>',
version=__version__,
description='<%= package.description %>',
long_description='''
<%= package.description %>
''',
keywords='<%= package.keywords %>',
author='<%= package.author.name %>',
author_email='<%= package.author.email %>',
url='<%= package.url %>',
license='<%= package.license %>',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: <%= package.license %> License',
'Natural Language :: English',
'Operating System :: Unix',
<% for (var i=0; i< package.troves.length; i++) { %>"<%= package.troves[i] %>",
<% } %>'Operating System :: OS Independent',
],
packages=find_packages(),
include_package_data=<%= package.includePackageData ? "True" : "False" %>,
install_requires=[
# add your dependencies here
# remember to use 'package-name>=x.y.z,<x.y+1.0' notation (this way you get bugfixes)
],
extras_require={
'tests': tests_require,
},
entry_points={
'console_scripts': [
# add cli scripts here in this form:
# '<%= package.name %>=<%= package.name %>.cli:main',
],
},
)
| heynemann/generator-tornado | app/templates/_setup.py | Python | mit | 1,832 |
# Copyright (c) 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from absl import app
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
import test_util
# Detect if history deletion is enabled or disabled and print the result.
# The way to check is:
# - visit chrome://history;
# - get the first history item;
# - inspect the checkbox. If history deletion is disabled, the checkbox
#   has the 'disabled' attribute.
# TODO(crbug.com/986444): move those helper methods into test_util.py once
def getElementFromShadowRoot(driver, element, selector):
if element is None:
return None
else:
return driver.execute_script(
"return arguments[0].shadowRoot.querySelector(arguments[1])", element,
selector)
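# Usage sketch (hypothetical selector): resolve an element inside a shadow
# root, then keep descending one shadow boundary at a time, as main() does:
#   app = driver.find_element_by_css_selector('history-app')
#   toolbar = getElementFromShadowRoot(driver, app, 'history-toolbar')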
def main(argv):
driver = test_util.create_chrome_webdriver()
try:
driver.get('http://www.google.com')
driver.get('chrome://history')
# wait for page to be loaded
wait = WebDriverWait(driver, 10)
wait.until(
expected_conditions.visibility_of_element_located((By.TAG_NAME,
'history-app')))
history_app = driver.find_element_by_css_selector("history-app")
    history_list = getElementFromShadowRoot(driver, history_app, "history-list")
    # get the checkbox of the first history item
    history_item = getElementFromShadowRoot(driver, history_list,
                                            'history-item')
    checkbox = getElementFromShadowRoot(driver, history_item,
                                        '#main-container cr-checkbox')
disabled = checkbox.get_attribute('disabled')
if disabled == 'true':
print('DISABLED')
else:
print('ENABLED')
finally:
driver.quit()
if __name__ == '__main__':
app.run(main)
| ric2b/Vivaldi-browser | chromium/chrome/test/enterprise/e2e/policy/allow_deleting_browser_history/allow_deleting_browser_history_webdriver_test.py | Python | bsd-3-clause | 1,991 |
# Copyright (C) 2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest as ut
import importlib_wrapper
tutorial, skipIfMissingFeatures = importlib_wrapper.configure_and_import(
"@TUTORIALS_DIR@/04-lattice_boltzmann/04-lattice_boltzmann_part2.py",
gpu=True, loops=400)
@skipIfMissingFeatures
class Tutorial(ut.TestCase):
system = tutorial.system
if __name__ == "__main__":
ut.main()
| mkuron/espresso | testsuite/scripts/tutorials/test_04-lattice_boltzmann_part2.py | Python | gpl-3.0 | 1,053 |
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class TanitJobsCategory(models.Model):
name = models.CharField(max_length=255, unique=True)
def __str__(self):
return "%s" % self.name
@python_2_unicode_compatible
class KeeJobsCategory(models.Model):
name = models.CharField(max_length=255, unique=True)
def __str__(self):
return "%s" % self.name
| firasbenmakhlouf/JobLookup | metadata/models.py | Python | mit | 467 |
# encoding=utf-8
## SOLVED 2013/12/24
## 45228
# We shall say that an n-digit number is pandigital if it makes use of all the
# digits 1 to n exactly once; for example, the 5-digit number, 15234, is 1
# through 5 pandigital.
# The product 7254 is unusual, as the identity, 39 × 186 = 7254, containing
# multiplicand, multiplier, and product is 1 through 9 pandigital.
# Find the sum of all products whose multiplicand/multiplier/product identity
# can be written as a 1 through 9 pandigital.
# HINT: Some products can be obtained in more than one way so be sure to only
# include it once in your sum.
import helpers.sequence as sequence
def euler():
products_cache = {}
accumulator = 0
for permutation in sequence.permutations('123456789'):
permutation = ''.join(permutation)
products = valid_products(permutation)
for product in products:
            if product not in products_cache:
                accumulator += product
                products_cache[product] = True
return accumulator
def valid_products(permutation):
products = []
for split_1 in range(1, 5):
        for split_2 in (5 - split_1, 4 - split_1):
            if split_2 > 0:
                split_2 += split_1
                multiplicand = int(permutation[:split_1])
                multiplier = int(permutation[split_1:split_2])
                product = int(permutation[split_2:])
if multiplicand * multiplier == product:
products.append(product)
return products
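# Worked example: for the permutation '391867254', split_1 = 2 and
# split_2 = 5 slice it into 39 * 186 = 7254, so 7254 is returned;
# euler() then adds each such product to the sum exactly once.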
| 6112/project-euler | problems/032.py | Python | mit | 1,539 |
from django.apps import apps
import logging
import urlparse
import random
import requests
from framework.exceptions import HTTPError
from framework.celery_tasks import app as celery_app
from framework import sentry
from website import settings, mails
from website.util.share import GraphNode, format_contributor, format_subject
from website.identifiers.utils import request_identifiers_from_ezid, get_ezid_client, build_ezid_metadata, parse_identifiers
logger = logging.getLogger(__name__)
@celery_app.task(ignore_results=True)
def on_preprint_updated(preprint_id, update_share=True, share_type=None, old_subjects=None):
# WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
# transactions are implemented in View and Task application layers.
from osf.models import PreprintService
preprint = PreprintService.load(preprint_id)
if old_subjects is None:
old_subjects = []
if preprint.node:
status = 'public' if preprint.node.is_public else 'unavailable'
try:
update_ezid_metadata_on_change(preprint, status=status)
except HTTPError as err:
sentry.log_exception()
sentry.log_message(err.args[0])
if update_share:
update_preprint_share(preprint, old_subjects, share_type)
def update_preprint_share(preprint, old_subjects=None, share_type=None):
if settings.SHARE_URL:
if not preprint.provider.access_token:
raise ValueError('No access_token for {}. Unable to send {} to SHARE.'.format(preprint.provider, preprint))
share_type = share_type or preprint.provider.share_publish_type
_update_preprint_share(preprint, old_subjects, share_type)
def _update_preprint_share(preprint, old_subjects, share_type):
# Any modifications to this function may need to change _async_update_preprint_share
data = serialize_share_preprint_data(preprint, share_type, old_subjects)
resp = send_share_preprint_data(preprint, data)
try:
resp.raise_for_status()
except Exception:
if resp.status_code >= 500:
            _async_update_preprint_share.delay(preprint._id, old_subjects, share_type)
else:
send_desk_share_preprint_error(preprint, resp, 0)
@celery_app.task(bind=True, max_retries=4, acks_late=True)
def _async_update_preprint_share(self, preprint_id, old_subjects, share_type):
# Any modifications to this function may need to change _update_preprint_share
# Takes preprint_id to ensure async retries push fresh data
PreprintService = apps.get_model('osf.PreprintService')
preprint = PreprintService.load(preprint_id)
data = serialize_share_preprint_data(preprint, share_type, old_subjects)
resp = send_share_preprint_data(preprint, data)
    try:
        # send_share_preprint_data above has already POSTed and logged the response
        resp.raise_for_status()
except Exception as e:
if resp.status_code >= 500:
if self.request.retries == self.max_retries:
send_desk_share_preprint_error(preprint, resp, self.request.retries)
raise self.retry(
exc=e,
countdown=(random.random() + 1) * min(60 + settings.CELERY_RETRY_BACKOFF_BASE ** self.request.retries, 60 * 10)
)
else:
send_desk_share_preprint_error(preprint, resp, self.request.retries)
def serialize_share_preprint_data(preprint, share_type, old_subjects):
return {
'data': {
'type': 'NormalizedData',
'attributes': {
'tasks': [],
'raw': None,
'data': {'@graph': format_preprint(preprint, share_type, old_subjects)}
}
}
}
def send_share_preprint_data(preprint, data):
resp = requests.post('{}api/v2/normalizeddata/'.format(settings.SHARE_URL), json=data, headers={'Authorization': 'Bearer {}'.format(preprint.provider.access_token), 'Content-Type': 'application/vnd.api+json'})
logger.debug(resp.content)
return resp
def format_preprint(preprint, share_type, old_subjects=None):
if old_subjects is None:
old_subjects = []
from osf.models import Subject
old_subjects = [Subject.objects.get(id=s) for s in old_subjects]
preprint_graph = GraphNode(share_type, **{
'title': preprint.node.title,
'description': preprint.node.description or '',
'is_deleted': (
not preprint.is_published or
not preprint.node.is_public or
preprint.node.is_preprint_orphan or
preprint.node.tags.filter(name='qatest').exists() or
preprint.node.is_deleted
),
'date_updated': preprint.date_modified.isoformat(),
'date_published': preprint.date_published.isoformat() if preprint.date_published else None
})
to_visit = [
preprint_graph,
GraphNode('workidentifier', creative_work=preprint_graph, uri=urlparse.urljoin(settings.DOMAIN, preprint._id + '/'))
]
if preprint.get_identifier('doi'):
to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri='http://dx.doi.org/{}'.format(preprint.get_identifier('doi').value)))
if preprint.provider.domain_redirect_enabled:
to_visit.append(GraphNode('workidentifier', creative_work=preprint_graph, uri=preprint.absolute_url))
if preprint.article_doi:
# Article DOI refers to a clone of this preprint on another system and therefore does not qualify as an identifier for this preprint
related_work = GraphNode('creativework')
to_visit.append(GraphNode('workrelation', subject=preprint_graph, related=related_work))
to_visit.append(GraphNode('workidentifier', creative_work=related_work, uri='http://dx.doi.org/{}'.format(preprint.article_doi)))
preprint_graph.attrs['tags'] = [
GraphNode('throughtags', creative_work=preprint_graph, tag=GraphNode('tag', name=tag))
for tag in preprint.node.tags.values_list('name', flat=True) if tag
]
current_subjects = [
GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=False, subject=format_subject(s))
for s in preprint.subjects.all()
]
deleted_subjects = [
GraphNode('throughsubjects', creative_work=preprint_graph, is_deleted=True, subject=format_subject(s))
for s in old_subjects if not preprint.subjects.filter(id=s.id).exists()
]
preprint_graph.attrs['subjects'] = current_subjects + deleted_subjects
to_visit.extend(format_contributor(preprint_graph, user, preprint.node.get_visible(user), i) for i, user in enumerate(preprint.node.contributors))
to_visit.extend(GraphNode('AgentWorkRelation', creative_work=preprint_graph, agent=GraphNode('institution', name=institution))
for institution in preprint.node.affiliated_institutions.values_list('name', flat=True))
visited = set()
to_visit.extend(preprint_graph.get_related())
    while to_visit:
        n = to_visit.pop(0)
if n in visited:
continue
visited.add(n)
to_visit.extend(list(n.get_related()))
return [node.serialize() for node in visited]
@celery_app.task(ignore_results=True)
def get_and_set_preprint_identifiers(preprint):
ezid_response = request_identifiers_from_ezid(preprint)
id_dict = parse_identifiers(ezid_response)
preprint.set_identifier_values(doi=id_dict['doi'], ark=id_dict['ark'])
@celery_app.task(ignore_results=True)
def update_ezid_metadata_on_change(target_object, status):
if (settings.EZID_USERNAME and settings.EZID_PASSWORD) and target_object.get_identifier('doi'):
client = get_ezid_client()
doi, metadata = build_ezid_metadata(target_object)
client.change_status_identifier(status, doi, metadata)
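# Hedged usage sketch: both functions above are Celery tasks, so callers would
# normally queue them rather than invoke them inline. The 'public' status is an
# assumed example value, not necessarily what the real callers pass.
def _example_queue_identifier_tasks(preprint):
    get_and_set_preprint_identifiers.delay(preprint)
    update_ezid_metadata_on_change.delay(preprint, status='public')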
def send_desk_share_preprint_error(preprint, resp, retries):
mails.send_mail(
to_addr=settings.SUPPORT_EMAIL,
mail=mails.SHARE_PREPRINT_ERROR_DESK,
preprint=preprint,
resp=resp,
retries=retries,
)
| caneruguz/osf.io | website/preprints/tasks.py | Python | apache-2.0 | 8,601 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from db.Corpus_DB import Corpus_DB
from db.BOW_DB import BOW_DB
from db.LDA_DB import LDA_DB
from db.ITM_DB import ITM_DB
from vis.ScatterPlot1 import ScatterPlot1 as ScatterPlot1Handler
def index():
with Corpus_DB() as corpus_db:
with BOW_DB() as bow_db:
with LDA_DB() as lda_db:
handler = ScatterPlot1Handler(request, response, corpus_db, bow_db, lda_db)
response.delimiters = ('[[', ']]')
return handler.GenerateResponse()
def gib():
with Corpus_DB() as corpus_db:
with BOW_DB() as bow_db:
with LDA_DB() as lda_db:
handler = ScatterPlot1Handler(request, response, corpus_db, bow_db, lda_db)
handler.UpdateModel()
handler.InspectModel()
handler.LoadGIB()
dataStr = json.dumps(handler.content, encoding='utf-8', indent=2, sort_keys=True)
response.headers['Content-Type'] = 'application/json'
return dataStr
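# Hedged side note: on Python 2.7+ the three nested `with` blocks used above
# can be flattened into a single statement; behaviour is identical. A sketch of
# index() written that way (request/response are the framework's controller
# globals, as in the original functions):
def _example_flat_index():
    with Corpus_DB() as corpus_db, BOW_DB() as bow_db, LDA_DB() as lda_db:
        handler = ScatterPlot1Handler(request, response, corpus_db, bow_db, lda_db)
        response.delimiters = ('[[', ']]')
        return handler.GenerateResponse()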
| maoxuxiang/termite_mallet_project | server_src/controllers/ScatterPlot1.py | Python | bsd-3-clause | 897 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Test Progress widget."""
import pytest
from qutebrowser.mainwindow.statusbar import progress
from qutebrowser.utils import usertypes, utils
@pytest.fixture
def progress_widget(qtbot, config_stub):
"""Create a Progress widget and checks its initial state."""
widget = progress.Progress()
widget.enabled = True
qtbot.add_widget(widget)
assert not widget.isVisible()
assert not widget.isTextVisible()
return widget
def test_load_started(progress_widget):
"""Ensure the Progress widget reacts properly when the page starts loading.
Args:
progress_widget: Progress widget that will be tested.
"""
progress_widget.on_load_started()
assert progress_widget.value() == 0
assert progress_widget.isVisible()
@pytest.mark.parametrize('progress, load_status, expected_visible', [
(15, usertypes.LoadStatus.loading, True),
(100, usertypes.LoadStatus.success, False),
(100, usertypes.LoadStatus.error, False),
(100, usertypes.LoadStatus.warn, False),
(100, usertypes.LoadStatus.none, False),
])
def test_tab_changed(fake_web_tab, progress_widget, progress, load_status,
expected_visible):
"""Test that progress widget value and visibility state match expectations.
Args:
progress_widget: Progress widget that will be tested.
"""
tab = fake_web_tab(progress=progress, load_status=load_status)
progress_widget.on_tab_changed(tab)
actual = progress_widget.value(), progress_widget.isVisible()
expected = tab.progress(), expected_visible
assert actual == expected
def test_progress_affecting_statusbar_height(config_stub, fake_statusbar,
progress_widget):
"""Make sure the statusbar stays the same height when progress is shown.
https://github.com/qutebrowser/qutebrowser/issues/886
https://github.com/qutebrowser/qutebrowser/pull/890
"""
if not utils.is_mac:
        # The result differs depending on the font. Pinning a fixed font seems
        # to avoid this, but on macOS we get a warning about the font not
        # being found.
config_stub.val.fonts.statusbar = '8pt Monospace'
fake_statusbar.container.expose()
expected_height = fake_statusbar.fontMetrics().height()
assert fake_statusbar.height() == expected_height
fake_statusbar.hbox.addWidget(progress_widget)
progress_widget.show()
assert fake_statusbar.height() == expected_height
def test_progress_big_statusbar(qtbot, fake_statusbar, progress_widget):
"""Make sure the progress bar is small with a big statusbar.
https://github.com/qutebrowser/qutebrowser/commit/46d1760798b730852e2207e2cdc05a9308e44f80
"""
fake_statusbar.hbox.addWidget(progress_widget)
progress_widget.show()
expected_height = progress_widget.height()
fake_statusbar.hbox.addStrut(50)
assert progress_widget.height() == expected_height
| The-Compiler/qutebrowser | tests/unit/mainwindow/statusbar/test_progress.py | Python | gpl-3.0 | 3,724 |
# -*- coding: utf-8 -*-
from outwiker.gui.baseaction import BaseAction
from .i18n import get_
from . import defines
class BaseHeadAction(BaseAction):
"""
    Base class for the actions that insert commands
"""
def __init__(self, application):
self._application = application
global _
_ = get_()
def _getEditor(self):
"""
        Return a reference to the code editor
"""
return self._application.mainWindow.pagePanel.pageView.codeEditor
class TitleAction(BaseHeadAction):
"""
    Insert the (:title:) command
"""
stringId = '%sInsertTitle' % defines.PREFIX_ID
@property
def title(self):
return _('Title (:title ...:)')
@property
def description(self):
return _(defines.ACTION_DESCRIPTION) % 'title'
def run(self, params):
self._getEditor().turnText('(:title ', ':)')
class DescriptionAction(BaseHeadAction):
"""
    Insert the (:description:) command
"""
stringId = '%sInsertDescription' % defines.PREFIX_ID
@property
def title(self):
return _('Description (:description ...:)')
@property
def description(self):
return _(defines.ACTION_DESCRIPTION) % 'description'
def run(self, params):
self._getEditor().turnText('(:description ', ':)')
class KeywordsAction(BaseHeadAction):
"""
    Insert the (:keywords:) command
"""
stringId = '%sInsertKeywords' % defines.PREFIX_ID
@property
def title(self):
return _('Keywords (:keywords ...:)')
@property
def description(self):
return _(defines.ACTION_DESCRIPTION) % 'keywords'
def run(self, params):
self._getEditor().turnText('(:keywords ', ':)')
class CustomHeadsAction(BaseHeadAction):
"""
    Insert the (:htmlhead:) command
"""
stringId = '%sInsertHtmlHead' % defines.PREFIX_ID
@property
def title(self):
return _('Custom head (:htmlhead:)')
@property
def description(self):
return _(defines.ACTION_DESCRIPTION) % 'htmlhead'
    def run(self, params):
self._getEditor().turnText('(:htmlhead:)\n', '\n(:htmlheadend:)')
class HtmlAttrsAction(BaseHeadAction):
"""
    Insert the (:htmlattrs:) command
"""
stringId = '%sInsertHtmlAttrs' % defines.PREFIX_ID
@property
    def title(self):
return _('<html> tag attributes (:htmlattrs ...:)')
@property
    def description(self):
return _(defines.ACTION_DESCRIPTION) % 'htmlattrs'
    def run(self, params):
self._getEditor().turnText('(:htmlattrs ', ':)')
class StyleAction(BaseHeadAction):
"""
    Insert the (:style:) command
"""
stringId = '%sInsertStyle' % defines.PREFIX_ID
@property
    def title(self):
return _('Custom Style (:style ...:)')
@property
    def description(self):
return _(defines.ACTION_DESCRIPTION) % 'style'
    def run(self, params):
self._getEditor().turnText('(:style:)\n', '\n(:styleend:)')
actions = (TitleAction,
DescriptionAction,
KeywordsAction,
CustomHeadsAction,
HtmlAttrsAction,
StyleAction)
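# Hedged illustration (not part of the plugin): registration code typically
# keys off each action's stringId. The helper below just enumerates those ids
# and is safe to call without a running OutWiker instance.
def _example_list_action_ids():
    return [action.stringId for action in actions]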
| unreal666/outwiker | plugins/htmlheads/htmlheads/actions.py | Python | gpl-3.0 | 3,329 |
{
    'name': 'Product status at website shop',
    'version': '1.0.1',
    'author': 'IT-Projects LLC, Ivan Yelizariev',
    'license': 'GPL-3',
    'category': 'Sale',
    'website': 'https://yelizariev.github.io',
    'depends': ['website_sale', 'stock'],
'data':[
'website_sale_stock_status_views.xml',
'website_sale_stock_status_data.xml',
],
'installable': True
}
| Endika/website-addons | website_sale_stock_status/__openerp__.py | Python | lgpl-3.0 | 408 |
# -*- coding: utf-8 -*-
"""Templates for description tags <dd>, <dl> and <dt>"""
from ..environment import env
dd = env.from_string("""\
<dd>{%- if text -%} {{ text }} {%- endif -%}</dd>
""")
dl = env.from_string("""\
<dl>{%- if text -%} {{ text }} {%- endif -%}</dl>
""")
dt = env.from_string("""\
<dt>{%- if text -%} {{ text }} {%- endif -%}</dt>
""")
| bharadwajyarlagadda/korona | korona/templates/html/tags/description.py | Python | mit | 358 |
#----------------------------------------------------------------------
# Name: wxPython.tools.img2py
# Purpose: Convert an image to Python code.
#
# Author: Robin Dunn
#
# RCS-ID: $Id: img2py.py,v 1.5 2003/12/22 19:09:46 RD Exp $
# Copyright: (c) 2002 by Total Control Software
# Licence: wxWindows license
#----------------------------------------------------------------------
# 12/21/2003 - Jeff Grimmett ([email protected])
#
# o V2.5 compatibility update
#
"""
img2py.py -- Convert an image to PNG format and embed it in a Python
module with appropriate code so it can be loaded into
a program at runtime. The benefit is that since it is
Python source code it can be delivered as a .pyc or
'compiled' into the program using freeze, py2exe, etc.
Usage:
img2py.py [options] image_file python_file
Options:
-m <#rrggbb> If the original image has a mask or transparency defined
it will be used by default. You can use this option to
override the default or provide a new mask by specifying
a colour in the image to mark as transparent.
-n <name> Normally generic names (getBitmap, etc.) are used for the
image access functions. If you use this option you can
specify a name that should be used to customize the access
                 functions (getNameBitmap, etc.)
-c Maintain a catalog of names that can be used to reference
images. Catalog can be accessed via catalog and index attributes
of the module. If the -n <name> option is specified then <name>
is used for the catalog key and index value, otherwise
the filename without any path or extension is used as the key.
-a This flag specifies that the python_file should be appended
to instead of overwritten. This in combination with -n will
allow you to put multiple images in one Python source file.
-u Don't use compression. Leaves the data uncompressed.
-i Also output a function to return the image as a wxIcon.
"""
#
# Changes:
# - Cliff Wells <[email protected]>
# 20021206: Added catalog (-c) option.
#
# 12/21/2003 - Jeff Grimmett ([email protected])
#
# o V2.5 compatibility update
#
import cPickle
import cStringIO
import getopt
import glob
import os
import sys
import tempfile
import zlib
import wx
import img2img
def crunch_data(data, compressed):
# compress it?
if compressed:
data = zlib.compress(data, 9)
# convert to a printable format, so it can be in a Python source file
data = repr(data)
# This next bit is borrowed from PIL. It is used to wrap the text intelligently.
fp = cStringIO.StringIO()
data = data + " " # buffer for the +1 test
c = i = 0
word = ""
octdigits = "01234567"
hexdigits = "0123456789abcdef"
while i < len(data):
if data[i] != "\\":
word = data[i]
i = i + 1
else:
if data[i+1] in octdigits:
for n in range(2, 5):
if data[i+n] not in octdigits:
break
word = data[i:i+n]
i = i + n
elif data[i+1] == 'x':
for n in range(2, 5):
if data[i+n] not in hexdigits:
break
word = data[i:i+n]
i = i + n
else:
word = data[i:i+2]
i = i + 2
l = len(word)
if c + l >= 78-1:
fp.write("\\\n")
c = 0
fp.write(word)
c = c + l
# return the formatted compressed data
return fp.getvalue()
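# Hedged round-trip sketch: crunch_data() returns a printable Python literal,
# so the original bytes can be recovered with eval() + zlib.decompress(). This
# is illustration only; the generated modules expose the same data through
# their get<Name>Data() accessors instead.
def _example_crunch_roundtrip(raw_bytes):
    literal = crunch_data(raw_bytes, 1)
    assert zlib.decompress(eval(literal)) == raw_bytes
    return literal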
def main(args):
if not args or ("-h" in args):
print __doc__
return
# some bitmap related things need to have a wxApp initialized...
if wx.GetApp() is None:
app = wx.PySimpleApp()
append = 0
compressed = 1
maskClr = None
imgName = ""
icon = 0
catalog = 0
try:
opts, fileArgs = getopt.getopt(args, "auicn:m:")
except getopt.GetoptError:
print __doc__
return
for opt, val in opts:
if opt == "-a":
append = 1
elif opt == "-u":
compressed = 0
elif opt == "-n":
imgName = val
elif opt == "-m":
maskClr = val
elif opt == "-i":
icon = 1
elif opt == "-c":
catalog = 1
if len(fileArgs) != 2:
print __doc__
return
image_file, python_file = fileArgs
# convert the image file to a temporary file
tfname = tempfile.mktemp()
ok, msg = img2img.convert(image_file, maskClr, None, tfname, wx.BITMAP_TYPE_PNG, ".png")
if not ok:
print msg
return
data = open(tfname, "rb").read()
data = crunch_data(data, compressed)
os.unlink(tfname)
if append:
out = open(python_file, "a")
else:
out = open(python_file, "w")
if catalog:
pyPath, pyFile = os.path.split(python_file)
imgPath, imgFile = os.path.split(image_file)
if not imgName:
imgName = os.path.splitext(imgFile)[0]
print "\nWarning: -n not specified. Using filename (%s) for catalog entry." % imgName
old_index = []
if append:
# check to see if catalog exists already (file may have been created
# with an earlier version of img2py or without -c option)
oldSysPath = sys.path[:]
sys.path = [pyPath] # make sure we don't import something else by accident
mod = __import__(os.path.splitext(pyFile)[0])
if 'index' not in dir(mod):
print "\nWarning: %s was originally created without catalog." % python_file
print " Any images already in file will not be cataloged.\n"
out.write("\n# ***************** Catalog starts here *******************")
out.write("\n\ncatalog = {}\n")
out.write("index = []\n\n")
out.write("class ImageClass: pass\n\n")
else: # save a copy of the old index so we can warn about duplicate names
old_index[:] = mod.index[:]
del mod
sys.path = oldSysPath[:]
out.write("#" + "-" * 70 + "\n")
if not append:
out.write("# This file was generated by %s\n#\n" % sys.argv[0])
out.write("from wx from PIL import ImageFromStream, BitmapFromImage\n")
if icon:
out.write("from wx import EmptyIcon\n")
if compressed:
out.write("import cStringIO, zlib\n\n\n")
else:
out.write("import cStringIO\n\n\n")
if catalog:
out.write("catalog = {}\n")
out.write("index = []\n\n")
out.write("class ImageClass: pass\n\n")
if compressed:
out.write("def get%sData():\n"
" return zlib.decompress(\n%s)\n\n"
% (imgName, data))
else:
out.write("def get%sData():\n"
" return \\\n%s\n\n"
% (imgName, data))
out.write("def get%sBitmap():\n"
" return BitmapFromImage(get%sImage())\n\n"
"def get%sImage():\n"
" stream = cStringIO.StringIO(get%sData())\n"
" return ImageFromStream(stream)\n\n"
% tuple([imgName] * 4))
if icon:
out.write("def get%sIcon():\n"
" icon = EmptyIcon()\n"
" icon.CopyFromBitmap(get%sBitmap())\n"
" return icon\n\n"
% tuple([imgName] * 2))
if catalog:
if imgName in old_index:
print "Warning: %s already in catalog." % imgName
print " Only the last entry will be accessible.\n"
old_index.append(imgName)
out.write("index.append('%s')\n" % imgName)
out.write("catalog['%s'] = ImageClass()\n" % imgName)
out.write("catalog['%s'].getData = get%sData\n" % tuple([imgName] * 2))
out.write("catalog['%s'].getImage = get%sImage\n" % tuple([imgName] * 2))
out.write("catalog['%s'].getBitmap = get%sBitmap\n" % tuple([imgName] * 2))
if icon:
out.write("catalog['%s'].getIcon = get%sIcon\n" % tuple([imgName] * 2))
out.write("\n\n")
if imgName:
n_msg = ' using "%s"' % imgName
else:
n_msg = ""
if maskClr:
m_msg = " with mask %s" % maskClr
else:
m_msg = ""
print "Embedded %s%s into %s%s" % (image_file, n_msg, python_file, m_msg)
if __name__ == "__main__":
main(sys.argv[1:])
| tibor95/phatch-python2.7 | phatch/other/pyWx/img2py.py | Python | gpl-3.0 | 8,953 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.tests.test import Test
from ducktape.utils.util import wait_until
import time
class ProduceConsumeValidateTest(Test):
"""This class provides a shared template for tests which follow the common pattern of:
- produce to a topic in the background
- consume from that topic in the background
- run some logic, e.g. fail topic leader etc.
- perform validation
"""
def __init__(self, test_context):
super(ProduceConsumeValidateTest, self).__init__(test_context=test_context)
        # How long to wait for the producer to declare itself healthy? This
        # can be overridden by inheriting classes.
self.producer_start_timeout_sec = 20
# How long to wait for the consumer to start consuming messages?
self.consumer_start_timeout_sec = 60
        # How long to wait for the consumer process to fork? This
# is important in the case when the consumer is starting from the end,
# and we don't want it to miss any messages. The race condition this
# timeout avoids is that the consumer has not forked even after the
# producer begins producing messages, in which case we will miss the
# initial set of messages and get spurious test failures.
self.consumer_init_timeout_sec = 0
self.enable_idempotence = False
def setup_producer_and_consumer(self):
raise NotImplementedError("Subclasses should implement this")
def start_producer_and_consumer(self):
# Start background producer and consumer
self.consumer.start()
if (self.consumer_init_timeout_sec > 0):
self.logger.debug("Waiting %ds for the consumer to initialize.",
self.consumer_init_timeout_sec)
start = int(time.time())
wait_until(lambda: self.consumer.alive(self.consumer.nodes[0]) is True,
timeout_sec=self.consumer_init_timeout_sec,
err_msg="Consumer process took more than %d s to fork" %\
self.consumer_init_timeout_sec)
end = int(time.time())
# If `JMXConnectFactory.connect` is invoked during the
# initialization of the JMX server, it may fail to throw the
# specified IOException back to the calling code. The sleep is a
# workaround that should allow initialization to complete before we
# try to connect. See KAFKA-4620 for more details.
time.sleep(1)
remaining_time = self.consumer_init_timeout_sec - (end - start)
if remaining_time < 0 :
remaining_time = 0
if self.consumer.new_consumer:
wait_until(lambda: self.consumer.has_partitions_assigned(self.consumer.nodes[0]) is True,
timeout_sec=remaining_time,
err_msg="Consumer process took more than %d s to have partitions assigned" %\
remaining_time)
self.producer.start()
wait_until(lambda: self.producer.num_acked > 5,
timeout_sec=self.producer_start_timeout_sec,
err_msg="Producer failed to produce messages for %ds." %\
self.producer_start_timeout_sec)
wait_until(lambda: len(self.consumer.messages_consumed[1]) > 0,
timeout_sec=self.consumer_start_timeout_sec,
err_msg="Consumer failed to consume messages for %ds." %\
self.consumer_start_timeout_sec)
def check_alive(self):
msg = ""
for node in self.consumer.nodes:
if not self.consumer.alive(node):
msg = "The consumer has terminated, or timed out, on node %s." % str(node.account)
for node in self.producer.nodes:
if not self.producer.alive(node):
msg += "The producer has terminated, or timed out, on node %s." % str(node.account)
if len(msg) > 0:
raise Exception(msg)
def check_producing(self):
currently_acked = self.producer.num_acked
wait_until(lambda: self.producer.num_acked > currently_acked + 5, timeout_sec=30,
err_msg="Expected producer to still be producing.")
def stop_producer_and_consumer(self):
self.check_alive()
self.check_producing()
self.producer.stop()
self.consumer.wait()
def run_produce_consume_validate(self, core_test_action=None, *args):
"""Top-level template for simple produce/consume/validate tests."""
try:
self.start_producer_and_consumer()
if core_test_action is not None:
core_test_action(*args)
self.stop_producer_and_consumer()
self.validate()
except BaseException as e:
for s in self.test_context.services:
self.mark_for_collect(s)
raise
@staticmethod
def annotate_missing_msgs(missing, acked, consumed, msg):
missing_list = list(missing)
msg += "%s acked message did not make it to the Consumer. They are: " %\
len(missing_list)
if len(missing_list) < 20:
msg += str(missing_list) + ". "
else:
msg += ", ".join(str(m) for m in missing_list[:20])
msg += "...plus %s more. Total Acked: %s, Total Consumed: %s. " \
% (len(missing_list) - 20, len(set(acked)), len(set(consumed)))
return msg
@staticmethod
def annotate_data_lost(data_lost, msg, number_validated):
print_limit = 10
if len(data_lost) > 0:
msg += "The first %s missing messages were validated to ensure they are in Kafka's data files. " \
"%s were missing. This suggests data loss. Here are some of the messages not found in the data files: %s\n" \
% (number_validated, len(data_lost), str(data_lost[0:print_limit]) if len(data_lost) > print_limit else str(data_lost))
else:
msg += "We validated that the first %s of these missing messages correctly made it into Kafka's data files. " \
"This suggests they were lost on their way to the consumer." % number_validated
return msg
def validate(self):
"""Check that each acked message was consumed."""
success = True
msg = ""
acked = self.producer.acked
consumed = self.consumer.messages_consumed[1]
        # Correctness of the set difference operation depends on using equivalent message_validators in producer and consumer
missing = set(acked) - set(consumed)
self.logger.info("num consumed: %d" % len(consumed))
# Were all acked messages consumed?
if len(missing) > 0:
msg = self.annotate_missing_msgs(missing, acked, consumed, msg)
success = False
        # Did we miss anything due to data loss?
to_validate = list(missing)[0:1000 if len(missing) > 1000 else len(missing)]
data_lost = self.kafka.search_data_files(self.topic, to_validate)
msg = self.annotate_data_lost(data_lost, msg, len(to_validate))
if self.enable_idempotence:
self.logger.info("Ran a test with idempotence enabled. We expect no duplicates")
else:
self.logger.info("Ran a test with idempotence disabled.")
# Are there duplicates?
if len(set(consumed)) != len(consumed):
num_duplicates = abs(len(set(consumed)) - len(consumed))
msg += "(There are also %s duplicate messages in the log - but that is an acceptable outcome)\n" % num_duplicates
if self.enable_idempotence:
assert False, "Detected %s duplicates even though idempotence was enabled." % num_duplicates
# Collect all logs if validation fails
if not success:
for s in self.test_context.services:
self.mark_for_collect(s)
assert success, msg
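# Hedged usage sketch (names prefixed with _Example are hypothetical): a
# concrete test derives from the template above, constructs its services in
# setup_producer_and_consumer(), and passes its fault injection as
# core_test_action. kafka.restart_node() is assumed to exist on the Kafka
# service object the subclass creates.
class _ExampleBounceTest(ProduceConsumeValidateTest):
    def setup_producer_and_consumer(self):
        raise NotImplementedError("construct self.producer, self.consumer, "
                                  "self.kafka and self.topic here")
    def bounce_broker_check(self):
        self.setup_producer_and_consumer()
        self.run_produce_consume_validate(
            core_test_action=lambda: self.kafka.restart_node(self.kafka.nodes[0]))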
| airbnb/kafka | tests/kafkatest/tests/produce_consume_validate.py | Python | apache-2.0 | 8,828 |
# Copyright 2015 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions and classes to make testing easier."""
import os
from absl import flags
import mock
from perfkitbenchmarker import benchmark_spec
from perfkitbenchmarker import sample
from perfkitbenchmarker.configs import benchmark_config_spec
import six
from six.moves import range
_BENCHMARK_NAME = 'test_benchmark'
_BENCHMARK_UID = 'uid'
class SamplesTestMixin(object):
"""A mixin for unittest.TestCase that adds a type-specific equality
predicate for samples.
"""
def __init__(self, *args, **kwargs):
super(SamplesTestMixin, self).__init__(self, *args, **kwargs)
self.addTypeEqualityFunc(sample.Sample, self.assertSamplesEqual)
def assertSamplesEqualUpToTimestamp(self, a, b, msg=None):
"""Assert that two samples are equal, ignoring timestamp differences."""
self.assertEqual(a.metric, b.metric,
msg or 'Samples %s and %s have different metrics' % (a, b))
self.assertEqual(a.value, b.value,
msg or 'Samples %s and %s have different values' % (a, b))
self.assertEqual(a.unit, b.unit,
msg or 'Samples %s and %s have different units' % (a, b))
self.assertDictEqual(a.metadata, b.metadata, msg or
'Samples %s and %s have different metadata' % (a, b))
# Deliberately don't compare the timestamp fields of the samples.
def assertSampleListsEqualUpToTimestamp(self, a, b, msg=None):
"""Compare two lists of samples.
    Sadly, the built-in assertListEqual will only use Python's
built-in equality predicate for testing the equality of elements
in a list. Since we compare lists of samples a lot, we need a
custom test for that.
"""
self.assertEqual(len(a), len(b),
msg or 'Lists %s and %s are not the same length' % (a, b))
for i in range(len(a)):
self.assertIsInstance(a[i], sample.Sample,
msg or ('%s (item %s in list) is '
'not a sample.Sample object' %
(a[i], i)))
self.assertIsInstance(b[i], sample.Sample,
msg or ('%s (item %s in list) is '
'not a sample.Sample object' %
(b[i], i)))
try:
self.assertSamplesEqualUpToTimestamp(a[i], b[i], msg=msg)
except self.failureException as ex:
ex.message = str(ex) + (' (was item %s in list)' % i)
ex.args = (ex.message,)
raise ex
def assertDiskMounts(benchmark_config, mount_point):
"""Test whether a disk mounts in a given configuration.
Sets up a virtual machine following benchmark_config and then tests
whether the path mount_point contains a working disk by trying to
create a file there. Returns nothing if file creation works;
otherwise raises an exception.
Args:
benchmark_config: a dict in the format of
benchmark_spec.BenchmarkSpec. The config must specify exactly
one virtual machine.
mount_point: a path, represented as a string.
Raises:
RemoteCommandError if it cannot create a file at mount_point and
verify that the file exists.
AssertionError if benchmark_config does not specify exactly one
virtual machine.
"""
assert len(benchmark_config['vm_groups']) == 1
vm_group = next(six.itervalues(benchmark_config['vm_groups']))
assert vm_group.get('num_vms', 1) == 1
m = mock.MagicMock()
m.BENCHMARK_NAME = _BENCHMARK_NAME
config_spec = benchmark_config_spec.BenchmarkConfigSpec(
_BENCHMARK_NAME, flag_values=flags.FLAGS, **benchmark_config)
spec = benchmark_spec.BenchmarkSpec(
m, config_spec, _BENCHMARK_UID)
with spec.RedirectGlobalFlags():
try:
spec.ConstructVirtualMachines()
spec.Provision()
vm = spec.vms[0]
test_file_path = os.path.join(mount_point, 'test_file')
vm.RemoteCommand('touch %s' % test_file_path)
# This will raise RemoteCommandError if the test file does not
# exist.
vm.RemoteCommand('test -e %s' % test_file_path)
finally:
spec.Delete()
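# Hedged example (illustrative values only): assertDiskMounts expects a config
# shaped like a BenchmarkConfigSpec with exactly one single-VM group. The
# cloud, machine type and disk fields below are assumptions, not defaults.
_EXAMPLE_DISK_BENCHMARK_CONFIG = {
    'vm_groups': {
        'default': {
            'vm_spec': {'GCP': {'machine_type': 'n1-standard-1'}},
            'disk_spec': {'GCP': {'disk_type': 'pd-standard',
                                  'disk_size': 10,
                                  'mount_point': '/scratch'}},
        }
    }
}
# assertDiskMounts(_EXAMPLE_DISK_BENCHMARK_CONFIG, '/scratch')  # provisions a real VM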
| GoogleCloudPlatform/PerfKitBenchmarker | perfkitbenchmarker/test_util.py | Python | apache-2.0 | 4,727 |
""" Supports scanning a Tomato router. """
import logging
import json
from datetime import datetime, timedelta
import re
import threading
import requests
import homeassistant as ha
import homeassistant.util as util
from homeassistant.components.device_tracker import DOMAIN
# Return cached results if last scan was less than this time ago
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=5)
CONF_HTTP_ID = "http_id"
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def get_scanner(hass, config):
""" Validates config and returns a Tomato scanner. """
if not util.validate_config(config,
{DOMAIN: [ha.CONF_HOST, ha.CONF_USERNAME,
ha.CONF_PASSWORD, CONF_HTTP_ID]},
_LOGGER):
return None
return TomatoDeviceScanner(config[DOMAIN])
class TomatoDeviceScanner(object):
""" This class queries a wireless router running Tomato firmware
for connected devices.
A description of the Tomato API can be found on
http://paulusschoutsen.nl/blog/2013/10/tomato-api-documentation/
"""
def __init__(self, config):
host, http_id = config[ha.CONF_HOST], config[CONF_HTTP_ID]
username, password = config[ha.CONF_USERNAME], config[ha.CONF_PASSWORD]
self.req = requests.Request('POST',
'http://{}/update.cgi'.format(host),
data={'_http_id': http_id,
'exec': 'devlist'},
auth=requests.auth.HTTPBasicAuth(
username, password)).prepare()
self.parse_api_pattern = re.compile(r"(?P<param>\w*) = (?P<value>.*);")
self.logger = logging.getLogger("{}.{}".format(__name__, "Tomato"))
self.lock = threading.Lock()
self.date_updated = None
self.last_results = {"wldev": [], "dhcpd_lease": []}
self.success_init = self._update_tomato_info()
def scan_devices(self):
""" Scans for new devices and return a
list containing found device ids. """
self._update_tomato_info()
return [item[1] for item in self.last_results['wldev']]
def get_device_name(self, device):
""" Returns the name of the given device or None if we don't know. """
# Make sure there are results
if not self.date_updated:
self._update_tomato_info()
filter_named = [item[0] for item in self.last_results['dhcpd_lease']
if item[2] == device]
if not filter_named or not filter_named[0]:
return None
else:
return filter_named[0]
def _update_tomato_info(self):
""" Ensures the information from the Tomato router is up to date.
Returns boolean if scanning successful. """
self.lock.acquire()
# if date_updated is None or the date is too old we scan for new data
if not self.date_updated or \
datetime.now() - self.date_updated > MIN_TIME_BETWEEN_SCANS:
self.logger.info("Scanning")
try:
response = requests.Session().send(self.req, timeout=3)
# Calling and parsing the Tomato api here. We only need the
# wldev and dhcpd_lease values. For API description see:
# http://paulusschoutsen.nl/
# blog/2013/10/tomato-api-documentation/
if response.status_code == 200:
for param, value in \
self.parse_api_pattern.findall(response.text):
if param == 'wldev' or param == 'dhcpd_lease':
self.last_results[param] = \
json.loads(value.replace("'", '"'))
self.date_updated = datetime.now()
return True
elif response.status_code == 401:
# Authentication error
self.logger.exception((
"Failed to authenticate, "
"please check your username and password"))
return False
except requests.exceptions.ConnectionError:
# We get this if we could not connect to the router or
# an invalid http_id was supplied
self.logger.exception((
"Failed to connect to the router"
" or invalid http_id supplied"))
return False
except requests.exceptions.Timeout:
# We get this if we could not connect to the router or
# an invalid http_id was supplied
self.logger.exception(
"Connection to the router timed out")
return False
except ValueError:
# If json decoder could not parse the response
self.logger.exception(
"Failed to parse response from router")
return False
finally:
self.lock.release()
else:
# We acquired the lock before the IF check,
# release it before we return True
self.lock.release()
return True
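# Hedged parsing sketch: the Tomato endpoint returns JavaScript-style
# assignments rather than JSON, which is why the regex + quote replacement is
# needed. A self-contained illustration against a canned response body:
def _example_parse(scanner):
    body = "wldev = [['eth1','AA:BB:CC:DD:EE:FF',-42,0,0,0]];"
    for param, value in scanner.parse_api_pattern.findall(body):
        print("%s => %r" % (param, json.loads(value.replace("'", '"'))))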
| JMSwag/home-assistant | homeassistant/components/device_tracker/tomato.py | Python | mit | 5,382 |
'''
NPR Puzzle 2016-04-24
http://www.npr.org/2016/04/24/475299329/finish-this-puzzle-and-youll-be-just-like-overachievers-you-both-exceed
Name a famous singer -- first and last names.
The last four letters of the first name spelled
backward plus the first four letters of the last name
spelled forward, read together, in order, name a
section of products in a drugstore. What is it?
'''
import sys
sys.path.append('..')
from nprcommontools import get_famous_names
from nltk.corpus import wordnet as wn
import re
#%%
lemmas = wn.all_lemma_names()
lemma_dict = dict([(re.sub(r'[^a-z]+','',x),x) for x in lemmas if x == x.lower()])
names = get_famous_names()
for name in names.iterkeys():
if name.count(' ') == 1:
first_name, last_name = name.lower().split(' ')
if len(first_name) >= 4 and len(last_name) >= 4:
word = first_name[-1:-5:-1] + last_name[:4]
try:
print lemma_dict[word], name
except KeyError:
pass
| boisvert42/npr-puzzle-python | 2016/0424_drugstore_section.py | Python | cc0-1.0 | 1,003 |
""" Vanilla RNN
@author Graham Taylor
"""
import numpy as np
import theano
import theano.tensor as T
from sklearn.base import BaseEstimator
import logging
import time
import os
import datetime
import pickle as pickle
import math
import matplotlib.pyplot as plt
plt.ion()
mode = theano.Mode(linker='cvm')
#mode = 'DEBUG_MODE'
class RNN(object):
""" Recurrent neural network class
Supported output types:
real : linear output units, use mean-squared error
binary : binary output units, use cross-entropy error
    softmax : single softmax output, use cross-entropy error
"""
def __init__(self, input, n_in, n_hidden, n_out, activation=T.tanh,
output_type='real', use_symbolic_softmax=False):
self.input = input
self.activation = activation
self.output_type = output_type
# when using HF, SoftmaxGrad.grad is not implemented
# use a symbolic softmax which is slightly slower than T.nnet.softmax
# See: http://groups.google.com/group/theano-dev/browse_thread/
# thread/3930bd5a6a67d27a
if use_symbolic_softmax:
def symbolic_softmax(x):
e = T.exp(x)
return e / T.sum(e, axis=1).dimshuffle(0, 'x')
self.softmax = symbolic_softmax
else:
self.softmax = T.nnet.softmax
# recurrent weights as a shared variable
W_init = np.asarray(np.random.uniform(size=(n_hidden, n_hidden),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W = theano.shared(value=W_init, name='W')
# input to hidden layer weights
W_in_init = np.asarray(np.random.uniform(size=(n_in, n_hidden),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W_in = theano.shared(value=W_in_init, name='W_in')
# hidden to output layer weights
W_out_init = np.asarray(np.random.uniform(size=(n_hidden, n_out),
low=-.01, high=.01),
dtype=theano.config.floatX)
self.W_out = theano.shared(value=W_out_init, name='W_out')
h0_init = np.zeros((n_hidden,), dtype=theano.config.floatX)
self.h0 = theano.shared(value=h0_init, name='h0')
bh_init = np.zeros((n_hidden,), dtype=theano.config.floatX)
self.bh = theano.shared(value=bh_init, name='bh')
by_init = np.zeros((n_out,), dtype=theano.config.floatX)
self.by = theano.shared(value=by_init, name='by')
self.params = [self.W, self.W_in, self.W_out, self.h0,
self.bh, self.by]
        # for every parameter, we maintain its last update
# the idea here is to use "momentum"
# keep moving mostly in the same direction
self.updates = {}
for param in self.params:
init = np.zeros(param.get_value(borrow=True).shape,
dtype=theano.config.floatX)
self.updates[param] = theano.shared(init)
        # recurrent step function (tanh hidden activation) and linear output
        # activation function
def step(x_t, h_tm1):
h_t = self.activation(T.dot(x_t, self.W_in) + \
T.dot(h_tm1, self.W) + self.bh)
y_t = T.dot(h_t, self.W_out) + self.by
return h_t, y_t
# the hidden state `h` for the entire sequence, and the output for the
# entire sequence `y` (first dimension is always time)
[self.h, self.y_pred], _ = theano.scan(step,
sequences=self.input,
outputs_info=[self.h0, None])
# L1 norm ; one regularization option is to enforce L1 norm to
# be small
self.L1 = 0
self.L1 += abs(self.W.sum())
self.L1 += abs(self.W_in.sum())
self.L1 += abs(self.W_out.sum())
# square of L2 norm ; one regularization option is to enforce
# square of L2 norm to be small
self.L2_sqr = 0
self.L2_sqr += (self.W ** 2).sum()
self.L2_sqr += (self.W_in ** 2).sum()
self.L2_sqr += (self.W_out ** 2).sum()
if self.output_type == 'real':
self.loss = lambda y: self.mse(y)
elif self.output_type == 'binary':
# push through sigmoid
self.p_y_given_x = T.nnet.sigmoid(self.y_pred) # apply sigmoid
self.y_out = T.round(self.p_y_given_x) # round to {0,1}
self.loss = lambda y: self.nll_binary(y)
elif self.output_type == 'softmax':
# push through softmax, computing vector of class-membership
# probabilities in symbolic form
self.p_y_given_x = self.softmax(self.y_pred)
# compute prediction as class whose probability is maximal
self.y_out = T.argmax(self.p_y_given_x, axis=-1)
self.loss = lambda y: self.nll_multiclass(y)
else:
raise NotImplementedError
def mse(self, y):
# error between output and target
return T.mean((self.y_pred - y) ** 2)
def nll_binary(self, y):
# negative log likelihood based on binary cross entropy error
return T.mean(T.nnet.binary_crossentropy(self.p_y_given_x, y))
def nll_multiclass(self, y):
# negative log likelihood based on multiclass cross entropy error
# y.shape[0] is (symbolically) the number of rows in y, i.e.,
# number of time steps (call it T) in the sequence
# T.arange(y.shape[0]) is a symbolic vector which will contain
# [0,1,2,... n-1] T.log(self.p_y_given_x) is a matrix of
# Log-Probabilities (call it LP) with one row per example and
# one column per class LP[T.arange(y.shape[0]),y] is a vector
# v containing [LP[0,y[0]], LP[1,y[1]], LP[2,y[2]], ...,
# LP[n-1,y[n-1]]] and T.mean(LP[T.arange(y.shape[0]),y]) is
# the mean (across minibatch examples) of the elements in v,
# i.e., the mean log-likelihood across the minibatch.
return -T.mean(T.log(self.p_y_given_x)[T.arange(y.shape[0]), y])
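    # Hedged numeric illustration of the indexing trick documented above, in
    # plain NumPy (mirrors the symbolic T.arange(y.shape[0]) indexing):
    #   LP = np.log([[0.7, 0.2, 0.1],
    #                [0.1, 0.8, 0.1]])   # 2 time steps, 3 classes
    #   y = np.array([0, 1])             # correct class per step
    #   LP[np.arange(2), y]              # -> [log 0.7, log 0.8]
    # and -LP[np.arange(2), y].mean() is exactly the value returned here.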
def errors(self, y):
"""Return a float representing the number of errors in the sequence
over the total number of examples in the sequence ; zero one
loss over the size of the sequence
:type y: theano.tensor.TensorType
:param y: corresponds to a vector that gives for each example the
correct label
"""
# check if y has same dimension of y_pred
if y.ndim != self.y_out.ndim:
raise TypeError('y should have the same shape as self.y_out',
('y', y.type, 'y_out', self.y_out.type))
if self.output_type in ('binary', 'softmax'):
# check if y is of the correct datatype
if y.dtype.startswith('int'):
# the T.neq operator returns a vector of 0s and 1s, where 1
# represents a mistake in prediction
return T.mean(T.neq(self.y_out, y))
else:
raise NotImplementedError()
class MetaRNN(BaseEstimator):
def __init__(self, n_in=5, n_hidden=50, n_out=5, learning_rate=0.01,
n_epochs=100, L1_reg=0.00, L2_reg=0.00, learning_rate_decay=1,
activation='tanh', output_type='real',
final_momentum=0.9, initial_momentum=0.5,
momentum_switchover=5,
use_symbolic_softmax=False):
self.n_in = int(n_in)
self.n_hidden = int(n_hidden)
self.n_out = int(n_out)
self.learning_rate = float(learning_rate)
self.learning_rate_decay = float(learning_rate_decay)
self.n_epochs = int(n_epochs)
self.L1_reg = float(L1_reg)
self.L2_reg = float(L2_reg)
self.activation = activation
self.output_type = output_type
self.initial_momentum = float(initial_momentum)
self.final_momentum = float(final_momentum)
self.momentum_switchover = int(momentum_switchover)
self.use_symbolic_softmax = use_symbolic_softmax
self.ready()
def ready(self):
# input (where first dimension is time)
self.x = T.matrix()
# target (where first dimension is time)
if self.output_type == 'real':
self.y = T.matrix(name='y', dtype=theano.config.floatX)
elif self.output_type == 'binary':
self.y = T.matrix(name='y', dtype='int32')
elif self.output_type == 'softmax': # only vector labels supported
self.y = T.vector(name='y', dtype='int32')
else:
raise NotImplementedError
# initial hidden state of the RNN
self.h0 = T.vector()
# learning rate
self.lr = T.scalar()
if self.activation == 'tanh':
activation = T.tanh
elif self.activation == 'sigmoid':
activation = T.nnet.sigmoid
elif self.activation == 'relu':
activation = lambda x: x * (x > 0)
elif self.activation == 'cappedrelu':
activation = lambda x: T.minimum(x * (x > 0), 6)
else:
raise NotImplementedError
self.rnn = RNN(input=self.x, n_in=self.n_in,
n_hidden=self.n_hidden, n_out=self.n_out,
activation=activation, output_type=self.output_type,
use_symbolic_softmax=self.use_symbolic_softmax)
if self.output_type == 'real':
self.predict = theano.function(inputs=[self.x, ],
outputs=self.rnn.y_pred,
mode=mode)
elif self.output_type == 'binary':
self.predict_proba = theano.function(inputs=[self.x, ],
outputs=self.rnn.p_y_given_x, mode=mode)
self.predict = theano.function(inputs=[self.x, ],
outputs=T.round(self.rnn.p_y_given_x),
mode=mode)
elif self.output_type == 'softmax':
self.predict_proba = theano.function(inputs=[self.x, ],
outputs=self.rnn.p_y_given_x, mode=mode)
self.predict = theano.function(inputs=[self.x, ],
outputs=self.rnn.y_out, mode=mode)
else:
raise NotImplementedError
def shared_dataset(self, data_xy):
""" Load the dataset into shared variables """
data_x, data_y = data_xy
shared_x = theano.shared(np.asarray(data_x,
dtype=theano.config.floatX))
shared_y = theano.shared(np.asarray(data_y,
dtype=theano.config.floatX))
if self.output_type in ('binary', 'softmax'):
return shared_x, T.cast(shared_y, 'int32')
else:
return shared_x, shared_y
def __getstate__(self):
""" Return state sequence."""
params = self._get_params() # parameters set in constructor
weights = [p.get_value() for p in self.rnn.params]
state = (params, weights)
return state
def _set_weights(self, weights):
""" Set fittable parameters from weights sequence.
Parameters must be in the order defined by self.params:
W, W_in, W_out, h0, bh, by
"""
i = iter(weights)
for param in self.rnn.params:
param.set_value(i.next())
def __setstate__(self, state):
""" Set parameters from state sequence.
Parameters must be in the order defined by self.params:
W, W_in, W_out, h0, bh, by
"""
params, weights = state
self.set_params(**params)
self.ready()
self._set_weights(weights)
def save(self, fpath='.', fname=None):
""" Save a pickled representation of Model state. """
fpathstart, fpathext = os.path.splitext(fpath)
if fpathext == '.pkl':
# User supplied an absolute path to a pickle file
fpath, fname = os.path.split(fpath)
elif fname is None:
# Generate filename based on date
date_obj = datetime.datetime.now()
date_str = date_obj.strftime('%Y-%m-%d-%H:%M:%S')
class_name = self.__class__.__name__
fname = '%s.%s.pkl' % (class_name, date_str)
fabspath = os.path.join(fpath, fname)
logging.info("Saving to %s ..." % fabspath)
file = open(fabspath, 'wb')
state = self.__getstate__()
pickle.dump(state, file, protocol=pickle.HIGHEST_PROTOCOL)
file.close()
def load(self, path):
""" Load model parameters from path. """
logging.info("Loading from %s ..." % path)
file = open(path, 'rb')
state = pickle.load(file)
self.__setstate__(state)
file.close()
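    # Hedged round-trip sketch (paths and timestamp are illustrative only):
    #   model = MetaRNN(n_in=5, n_hidden=50, n_out=5)
    #   model.save('checkpoints')  # writes checkpoints/MetaRNN.<date>.pkl
    #   clone = MetaRNN()
    #   clone.load('checkpoints/MetaRNN.2016-01-23-12:00:00.pkl')
    #   # clone now carries the same weights, so predict() matches model.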
def fit(self, X_train, Y_train, X_test=None, Y_test=None,
validation_frequency=100):
""" Fit model
Pass in X_test, Y_test to compute test error and report during
training.
X_train : ndarray (n_seq x n_steps x n_in)
Y_train : ndarray (n_seq x n_steps x n_out)
validation_frequency : int
in terms of number of sequences (or number of weight updates)
"""
f = file('trainProcess/trainOutput-b03-300-100-20.txt','a+')
if X_test is not None:
assert(Y_test is not None)
self.interactive = True
test_set_x, test_set_y = self.shared_dataset((X_test, Y_test))
else:
self.interactive = False
train_set_x, train_set_y = self.shared_dataset((X_train, Y_train))
n_train = train_set_x.get_value(borrow=True).shape[0]
if self.interactive:
n_test = test_set_x.get_value(borrow=True).shape[0]
######################
# BUILD ACTUAL MODEL #
######################
logging.info('... building the model')
index = T.lscalar('index') # index to a case
# learning rate (may change)
l_r = T.scalar('l_r', dtype=theano.config.floatX)
mom = T.scalar('mom', dtype=theano.config.floatX) # momentum
cost = self.rnn.loss(self.y) \
+ self.L1_reg * self.rnn.L1 \
+ self.L2_reg * self.rnn.L2_sqr
compute_train_error = theano.function(inputs=[index, ],
outputs=self.rnn.loss(self.y),
givens={
self.x: train_set_x[index],
self.y: train_set_y[index]},
mode=mode)
if self.interactive:
compute_test_error = theano.function(inputs=[index, ],
outputs=self.rnn.loss(self.y),
givens={
self.x: test_set_x[index],
self.y: test_set_y[index]},
mode=mode)
# compute the gradient of cost with respect to theta = (W, W_in, W_out)
# gradients on the weights using BPTT
gparams = []
for param in self.rnn.params:
gparam = T.grad(cost, param)
gparams.append(gparam)
updates = {}
for param, gparam in zip(self.rnn.params, gparams):
weight_update = self.rnn.updates[param]
upd = mom * weight_update - l_r * gparam
updates[weight_update] = upd
updates[param] = param + upd
# compiling a Theano function `train_model` that returns the
# cost, but in the same time updates the parameter of the
# model based on the rules defined in `updates`
train_model = theano.function(inputs=[index, l_r, mom],
outputs=cost,
updates=updates,
givens={
self.x: train_set_x[index],
self.y: train_set_y[index]},
mode=mode)
###############
# TRAIN MODEL #
###############
logging.info('... training')
epoch = 0
while (epoch < self.n_epochs):
epoch = epoch + 1
for idx in xrange(n_train):
effective_momentum = self.final_momentum \
if epoch > self.momentum_switchover \
else self.initial_momentum
example_cost = train_model(idx, self.learning_rate,
effective_momentum)
# iteration number (how many weight updates have we made?)
# epoch is 1-based, index is 0 based
iter = (epoch - 1) * n_train + idx + 1
if iter % validation_frequency == 0:
# compute loss on training set
train_losses = [compute_train_error(i)
for i in xrange(n_train)]
this_train_loss = np.mean(train_losses)
if self.interactive:
test_losses = [compute_test_error(i)
for i in xrange(n_test)]
this_test_loss = np.mean(test_losses)
f.write('epoch %i, seq %i/%i, tr loss %f '
'te loss %f lr: %f \n' % \
(epoch, idx + 1, n_train,
this_train_loss, this_test_loss, self.learning_rate))
print('epoch %i, seq %i/%i, tr loss %f '
'te loss %f lr: %f' % \
(epoch, idx + 1, n_train,
this_train_loss, this_test_loss, self.learning_rate))
else:
f.write('epoch %i, seq %i/%i, train loss %f '
'lr: %f \n' % \
(epoch, idx + 1, n_train, this_train_loss,
self.learning_rate))
print('epoch %i, seq %i/%i, train loss %f '
'lr: %f' % \
(epoch, idx + 1, n_train, this_train_loss,
self.learning_rate))
self.learning_rate *= self.learning_rate_decay
f.close()
def test_real():
""" Test RNN with real-valued outputs. """
n_hidden = 200
n_in = 20
n_out = 5
n_steps = 10
n_seq = 100
np.random.seed(0)
# simple lag test
seq = np.random.randn(n_seq, n_steps, n_in)
targets = np.zeros((n_seq, n_steps, n_out))
targets[:, 1:, 0] = seq[:, :-1, 3] # delayed 1
targets[:, 1:, 1] = seq[:, :-1, 2] # delayed 1
targets[:, 2:, 2] = seq[:, :-2, 0] # delayed 2
targets += 0.01 * np.random.standard_normal(targets.shape)
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.001, learning_rate_decay=0.999,
n_epochs=400, activation='tanh')
model.fit(seq, targets, validation_frequency=1000)
[seqNum,lineNum,colNum] = targets.shape
print(seqNum,lineNum,colNum)
error = [0 for i in range(colNum)]
plt.close('all')
fig = plt.figure()
ax1 = plt.subplot(211)
plt.plot(seq[0])
ax1.set_title('input')
ax2 = plt.subplot(212)
true_targets = plt.plot(targets[0])
guess = model.predict(seq[0])
guessed_targets = plt.plot(guess, linestyle='--')
for i, x in enumerate(guessed_targets):
x.set_color(true_targets[i].get_color())
ax2.set_title('solid: true output, dashed: model output')
dif = abs(guess - targets[0])
[linedif,coldif] = dif.shape
print(linedif,coldif)
errorsum = 0
for i in range (colNum):
sum = 0
for j in range (lineNum):
sum += dif[j][i] ** 2
error[i] = math.sqrt(sum/lineNum)
errorsum += error[i]
print(error[i])
print("average error = ", errorsum/colNum)
def test_binary(multiple_out=False, n_epochs=250):
""" Test RNN with binary outputs. """
n_hidden = 50
n_in = 4
n_out = 30
n_steps = 20
n_seq = 300
np.random.seed(0)
# simple lag test
seqlist = []
count = 0
data = []
BASE_DIR = os.path.dirname(__file__)
file_path1 = os.path.join(BASE_DIR,"traindata/inputdata-b03-300-20.txt")
for l in open(file_path1):
#for l in open("inputdata-b02-300-10.txt"):
count += 1
row = [int(x) for x in l.split()]
if len(row) > 0:
data.append(row)
if (count == n_steps):
count = 0
if len(data) >0:
seqlist.append(data)
data = []
seqarray = np.asarray(seqlist)
seq = seqarray[:,:,:n_in]
targets = seqarray[:,:,n_in:]
seqlistTest = []
count = 0
dataTest = []
file_path2 = os.path.join(BASE_DIR, 'testdata/inputdata-b03-100-20.txt')
for l in open(file_path2):
#for l in open("inputdata-b02-100-10.txt"):
count += 1
row = [int(x) for x in l.split()]
if len(row) > 0:
dataTest.append(row)
if (count == n_steps):
count = 0
if len(dataTest) >0:
seqlistTest.append(dataTest)
dataTest = []
seqarrayTest = np.asarray(seqlistTest)
seqTest = seqarrayTest[:,:,:n_in]
targetsTest = seqarrayTest[:,:,n_in:]
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.13, learning_rate_decay=1,
n_epochs=n_epochs, activation='tanh', output_type='binary')
#model.fit(seq, targets, validation_frequency=1000)
model.fit(seq, targets, seqTest, targetsTest, validation_frequency=1000)
ferror = file('errorRate/errorRate-b03-300-100-20.txt','a+')
[seqNum,lineNum,colNum] = targetsTest.shape
#print (seqTest.shape)
seqs = xrange(seqNum)
error = [0 for i in range(lineNum*seqNum)]
errorsum = 0
for k in seqs:
guess = model.predict_proba(seqTest[k])
dif = abs(guess - targetsTest[k])
[lineDif,colDif] = dif.shape
#print(lineDif,colDif)
for i in range (lineDif):
ki = k*lineDif+i
for j in range (colDif):
if (dif[i][j] > 0.5):
error[ki] += 1
ferror.write('error %d = %d \n' % (ki,error[ki]))
if (error[ki]>0):
errorsum += 1
print(errorsum)
errorRate = errorsum/1.0/seqNum/lineNum
ferror.write("average error = %f \n" % (errorRate))
## seqs = xrange(1)
##
## [seqNum,lineNum,colNum] = targets.shape
## print(seqNum,lineNum,colNum)
## error = [0 for i in range(colNum)]
##
## plt.close('all')
## for seq_num in seqs:
## fig = plt.figure()
## ax1 = plt.subplot(211)
## plt.plot(seq[seq_num])
## ax1.set_title('input')
## ax2 = plt.subplot(212)
## true_targets = plt.step(xrange(n_steps), targets[seq_num], marker='o')
##
## guess = model.predict_proba(seq[seq_num])
## guessed_targets = plt.step(xrange(n_steps), guess)
## plt.setp(guessed_targets, linestyle='--', marker='d')
## for i, x in enumerate(guessed_targets):
## x.set_color(true_targets[i].get_color())
## ax2.set_ylim((-0.1, 1.1))
## ax2.set_title('solid: true output, dashed: model output (prob)')
##
##
## dif = abs(guess - targets[seq_num])
## [lineDif,colDif] = dif.shape
## print(lineDif,colDif)
## errorsum = 0
## for i in range (colNum):
## for j in range (lineNum):
## if (dif[j][i] > 0.5):
## error[i] += 1
## print(error[i])
## errorsum += error[i]
## print("average error = ", errorsum/colNum)
def test_softmax(n_epochs=250):
""" Test RNN with softmax outputs. """
n_hidden = 10
n_in = 5
n_steps = 10
n_seq = 100
n_classes = 3
n_out = n_classes # restricted to single softmax per time step
np.random.seed(0)
# simple lag test
seq = np.random.randn(n_seq, n_steps, n_in)
targets = np.zeros((n_seq, n_steps), dtype=np.int)
thresh = 0.5
# if lag 1 (dim 3) is greater than lag 2 (dim 0) + thresh
# class 1
# if lag 1 (dim 3) is less than lag 2 (dim 0) - thresh
# class 2
# if lag 2(dim0) - thresh <= lag 1 (dim 3) <= lag2(dim0) + thresh
# class 0
targets[:, 2:][seq[:, 1:-1, 3] > seq[:, :-2, 0] + thresh] = 1
targets[:, 2:][seq[:, 1:-1, 3] < seq[:, :-2, 0] - thresh] = 2
#targets[:, 2:, 0] = np.cast[np.int](seq[:, 1:-1, 3] > seq[:, :-2, 0])
model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,
learning_rate=0.001, learning_rate_decay=0.999,
n_epochs=n_epochs, activation='tanh',
output_type='softmax', use_symbolic_softmax=False)
model.fit(seq, targets, validation_frequency=1000)
seqs = xrange(10)
[seqNum,lineNum,colNum] = seq.shape
print(seqNum,lineNum,colNum)
error = [0 for i in range(colNum)]
plt.close('all')
for seq_num in seqs:
fig = plt.figure()
ax1 = plt.subplot(211)
plt.plot(seq[seq_num])
        ax1.set_title('input')
ax2 = plt.subplot(212)
# blue line will represent true classes
true_targets = plt.step(xrange(n_steps), targets[seq_num], marker='o')
# show probabilities (in b/w) output by model
guess = model.predict_proba(seq[seq_num])
guessed_probs = plt.imshow(guess.T, interpolation='nearest',
cmap='gray')
ax2.set_title('blue: true class, grayscale: probs assigned by model')
dif = abs(seq[seq_num] - targets[seq_num])
for i in range (colNum):
sum = 0
for j in range (lineNum):
sum += dif[i,j] ** 2
error[i] = math.sqrt(sum/lineNum)
print(error[i])
if __name__ == "__main__":
##logging.basicConfig(
## level = logging.INFO,
## format = 'LINE %(lineno)-4d %(levelname)-8s %(message)s',
## datafmt = '%m-%d %H:%M',
## filename = "D:/logresult20160123/one.log",
## filemode = 'w')
t0 = time.time()
#test_real()
# problem takes more epochs to solve
test_binary(multiple_out=True, n_epochs=90)
#test_softmax(n_epochs=250)
print ("Elapsed time: %f" % (time.time() - t0))
| mengyun1993/RNN-binary | rnn03.py | Python | bsd-3-clause | 27,063 |
from __future__ import unicode_literals
import unittest
import json, os, shutil, subprocess
import bench
import bench.utils
import bench.app
import bench.config.common_site_config
import bench.cli
bench.cli.from_command_line = True
class TestBenchInit(unittest.TestCase):
def setUp(self):
self.benches_path = "."
self.benches = []
def tearDown(self):
for bench_name in self.benches:
bench_path = os.path.join(self.benches_path, bench_name)
if os.path.exists(bench_path):
shutil.rmtree(bench_path, ignore_errors=True)
def test_init(self, bench_name="test-bench", **kwargs):
self.init_bench(bench_name, **kwargs)
self.assert_folders(bench_name)
self.assert_virtual_env(bench_name)
self.assert_common_site_config(bench_name, bench.config.common_site_config.default_config)
self.assert_config(bench_name)
self.assert_socketio(bench_name)
def test_multiple_benches(self):
# 1st bench
self.test_init("test-bench-1")
self.assert_common_site_config("test-bench-1", {
"webserver_port": 8000,
"socketio_port": 9000,
"redis_queue": "redis://localhost:11000",
"redis_socketio": "redis://localhost:12000",
"redis_cache": "redis://localhost:13000"
})
# 2nd bench
self.test_init("test-bench-2")
self.assert_common_site_config("test-bench-2", {
"webserver_port": 8001,
"socketio_port": 9001,
"redis_queue": "redis://localhost:11001",
"redis_socketio": "redis://localhost:12001",
"redis_cache": "redis://localhost:13001"
})
def test_new_site(self):
self.init_bench('test-bench')
self.new_site("test-site-1.dev")
def new_site(self, site_name):
new_site_cmd = ["bench", "new-site", site_name, "--admin-password", "admin"]
# set in travis
if os.environ.get("TRAVIS"):
new_site_cmd.extend(["--mariadb-root-password", "travis"])
subprocess.check_output(new_site_cmd, cwd=os.path.join(self.benches_path, "test-bench"))
site_path = os.path.join(self.benches_path, "test-bench", "sites", site_name)
self.assertTrue(os.path.exists(site_path))
self.assertTrue(os.path.exists(os.path.join(site_path, "private", "backups")))
self.assertTrue(os.path.exists(os.path.join(site_path, "private", "files")))
self.assertTrue(os.path.exists(os.path.join(site_path, "public", "files")))
site_config_path = os.path.join(site_path, "site_config.json")
self.assertTrue(os.path.exists(site_config_path))
with open(site_config_path, "r") as f:
site_config = json.loads(f.read())
for key in ("db_name", "db_password"):
self.assertTrue(key in site_config)
self.assertTrue(site_config[key])
def test_get_app(self):
site_name = "test-site-2.dev"
self.init_bench('test-bench')
self.new_site(site_name)
bench_path = os.path.join(self.benches_path, "test-bench")
bench.app.get_app("https://github.com/frappe/frappe-client", bench_path=bench_path)
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "frappeclient")))
def test_install_app(self):
site_name = "test-site-3.dev"
self.init_bench('test-bench')
self.new_site(site_name)
bench_path = os.path.join(self.benches_path, "test-bench")
# get app
bench.app.get_app("https://github.com/frappe/erpnext", "develop", bench_path=bench_path)
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "erpnext")))
# install app
bench.app.install_app("erpnext", bench_path=bench_path)
# install it to site
subprocess.check_output(["bench", "--site", site_name, "install-app", "erpnext"], cwd=bench_path)
out = subprocess.check_output(["bench", "--site", site_name, "list-apps"], cwd=bench_path)
self.assertTrue("erpnext" in out)
def test_remove_app(self):
self.init_bench('test-bench')
bench_path = os.path.join(self.benches_path, "test-bench")
# get app
bench.app.get_app("https://github.com/frappe/erpnext", "develop", bench_path=bench_path)
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "erpnext")))
# remove it
bench.app.remove_app("erpnext", bench_path=bench_path)
self.assertFalse(os.path.exists(os.path.join(bench_path, "apps", "erpnext")))
def test_switch_to_branch(self):
self.init_bench('test-bench')
bench_path = os.path.join(self.benches_path, "test-bench")
app_path = os.path.join(bench_path, "apps", "frappe")
bench.app.switch_branch(branch="master", apps=["frappe"], bench_path=bench_path, check_upgrade=False)
out = subprocess.check_output(['git', 'status'], cwd=app_path)
self.assertTrue("master" in out)
# bring it back to develop!
bench.app.switch_branch(branch="develop", apps=["frappe"], bench_path=bench_path, check_upgrade=False)
out = subprocess.check_output(['git', 'status'], cwd=app_path)
self.assertTrue("develop" in out)
def init_bench(self, bench_name, **kwargs):
self.benches.append(bench_name)
bench.utils.init(bench_name, **kwargs)
def test_drop_site(self):
self.init_bench('test-bench')
# Check without archive_path given to drop-site command
self.drop_site("test-drop-without-archive-path")
# Check with archive_path given to drop-site command
home = os.path.abspath(os.path.expanduser('~'))
archived_sites_path = os.path.join(home, 'archived_sites')
self.drop_site("test-drop-with-archive-path", archived_sites_path=archived_sites_path)
def drop_site(self, site_name, archived_sites_path=None):
self.new_site(site_name)
drop_site_cmd = ['bench', 'drop-site', site_name]
if archived_sites_path:
drop_site_cmd.extend(['--archived-sites-path', archived_sites_path])
if os.environ.get('TRAVIS'):
drop_site_cmd.extend(['--root-password', 'travis'])
bench_path = os.path.join(self.benches_path, 'test-bench')
try:
subprocess.check_output(drop_site_cmd, cwd=bench_path)
except subprocess.CalledProcessError as err:
print err.output
if not archived_sites_path:
archived_sites_path = os.path.join(bench_path, 'archived_sites')
self.assertTrue(os.path.exists(archived_sites_path))
self.assertTrue(os.path.exists(os.path.join(archived_sites_path, site_name)))
else:
self.assertTrue(os.path.exists(archived_sites_path))
self.assertTrue(os.path.exists(os.path.join(archived_sites_path, site_name)))
def assert_folders(self, bench_name):
for folder in bench.utils.folders_in_bench:
self.assert_exists(bench_name, folder)
self.assert_exists(bench_name, "sites", "assets")
self.assert_exists(bench_name, "apps", "frappe")
self.assert_exists(bench_name, "apps", "frappe", "setup.py")
def assert_virtual_env(self, bench_name):
bench_path = os.path.abspath(bench_name)
python = os.path.join(bench_path, "env", "bin", "python")
python_path = bench.utils.get_cmd_output('{python} -c "import os; print os.path.dirname(os.__file__)"'.format(python=python))
# part of bench's virtualenv
self.assertTrue(python_path.startswith(bench_path))
self.assert_exists(python_path)
self.assert_exists(python_path, "site-packages")
self.assert_exists(python_path, "site-packages", "IPython")
self.assert_exists(python_path, "site-packages", "pip")
site_packages = os.listdir(os.path.join(python_path, "site-packages"))
self.assertTrue(any(package.startswith("MySQL_python-1.2.5") for package in site_packages))
def assert_config(self, bench_name):
for config, search_key in (
("redis_queue.conf", "redis_queue.rdb"),
("redis_socketio.conf", "redis_socketio.rdb"),
("redis_cache.conf", "redis_cache.rdb")):
self.assert_exists(bench_name, "config", config)
with open(os.path.join(bench_name, "config", config), "r") as f:
    content = f.read().decode("utf-8")
    self.assertTrue(search_key in content)
def assert_socketio(self, bench_name):
self.assert_exists(bench_name, "node_modules")
self.assert_exists(bench_name, "node_modules", "socket.io")
def assert_common_site_config(self, bench_name, expected_config):
common_site_config_path = os.path.join(bench_name, 'sites', 'common_site_config.json')
self.assertTrue(os.path.exists(common_site_config_path))
config = self.load_json(common_site_config_path)
for key, value in expected_config.items():
self.assertEqual(config.get(key), value)
def assert_exists(self, *args):
self.assertTrue(os.path.exists(os.path.join(*args)))
def load_json(self, path):
with open(path, "r") as f:
return json.loads(f.read().decode("utf-8"))
| yashodhank/bench | bench/tests/test_init.py | Python | gpl-3.0 | 8,306 |
from flask_wtf import Form
from wtforms import TextField, DecimalField, TextAreaField, DateField, validators, PasswordField, BooleanField
class CommentForm(Form):
text = TextField('Title', [validators.Required()])
text2 = TextAreaField('Body')
longitude = DecimalField('Longitude')
latitude = DecimalField('Latitude')
date = DateField('Date')
class SignupForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required(), validators.EqualTo('confirm', message='Passwords must match')])
confirm = PasswordField('Confirm Password', [validators.Required()])
email = TextField('eMail', [validators.Required(),validators.Email()])
#accept_tos = BooleanField('I accept the TOS', [validators.Required()])
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
class PasswordResetForm(Form):
username = TextField('Username')
email = TextField('eMail')
class PasswordChangeForm(Form):
password = PasswordField('Password', [validators.Required()])
| homoludens/EventMap | hello/forms.py | Python | agpl-3.0 | 1,156 |
from rest_framework import serializers
from organization.models import Organization, Register, Comment
# future expansion to support multiple registers per organization
class RegisterSerializer(serializers.ModelSerializer):
class Meta:
model = Register
fields = ('id', 'name')
# future expansion to enable displaying of all ratings/comments on organization list page
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = ('rating', 'message')
class OrganizationSerializer(serializers.HyperlinkedModelSerializer):
registers = serializers.StringRelatedField(many=True)
comments = CommentSerializer(many=True, read_only=True)
class Meta:
model = Organization
fields = (
'id',
'name',
'verified',
'accepts_email',
'accepts_mail',
'address_line_one',
'address_line_two',
'postal_code',
'country',
'email_address',
'registers',
'average_rating',
'amount_ratings',
'comments',
)
depth = 2
| sakset/getyourdata | getyourdata/api/serializers.py | Python | mit | 1,178 |
import numpy as np
import sklearn.datasets as skdata
from tfs.dataset import data_tool as dtool
from tfs.dataset.base import Dataset
# TODO:
# the following is not wrapped
#
# 1. multilabel
# 2. manifold learning
# 3. decomposition
# 4. biclustering
#
# real dataset:
#
# - The Olivetti faces dataset
# - The 20 newsgroups text dataset
# - mldata.org repository
# - The Labeled Faces in the Wild face recognition dataset
# - Forest covertypes
# - RCV1 dataset
# - Boston House Prices dataset
# - Breast Cancer Wisconsin (Diagnostic) Database
# - Diabetes dataset
# - Optical Recognition of Handwritten Digits Data Set
# - Iris Plants Database
# - Linnerrud dataset
class SKData(Dataset):
def __init__(self,**kwargs):
self.setup(**kwargs)
super(SKData,self).__init__(None)
def setup(self):
raise NotImplementedError()
def load_train_test(self):
n = self._x.shape[0]
idx = np.arange(n)
te_idx,tr_idx = dtool.split(idx,[self._test_percent])
return self._x[tr_idx], self._y[tr_idx], self._x[te_idx], self._y[te_idx]
class MakeBlobs(SKData):
def __init__(self,**kwargs):
super(MakeBlobs,self).__init__(**kwargs)
def setup(self, test_percent = 0.3, n_samples=100, n_features=2, centers=3,
cluster_std=1.0, center_box=(-10.0, 10.0), shuffle=True,
random_state=None ):
X, y = skdata.make_blobs(
n_samples,
n_features,
centers,
cluster_std,
center_box,
shuffle,
random_state
)
self._test_percent = test_percent
self._x = X
self._y = y
class MakeClassification(SKData):
def __init__(self,**kwargs):
super(MakeClassification,self).__init__(**kwargs)
def setup(self,test_percent = 0.3, n_samples=100, n_features=20,
n_informative=2, n_redundant=2, n_repeated=0, n_classes=2,
n_clusters_per_class=2, weights=None, flip_y=0.01, class_sep=1.0,
hypercube=True, shift=0.0, scale=1.0, shuffle=True,
random_state=None):
X, y = skdata.make_classification(
n_samples, n_features, n_informative, n_redundant,
n_repeated, n_classes, n_clusters_per_class, weights, flip_y,
class_sep, hypercube, shift, scale, shuffle, random_state
)
self._test_percent = test_percent
self._x = X
self._y = y
class MakeRegression(SKData):
def __init__(self,**kwargs):
super(MakeRegression,self).__init__(**kwargs)
def setup(self,test_percent = 0.3, n_samples=100, n_features=100,
n_informative=10, n_targets=1, bias=0.0, effective_rank=None,
tail_strength=0.5, noise=0.0, shuffle=True, coef=False, random_state=None):
X, y = skdata.make_regression(
n_samples, n_features, n_informative, n_targets, bias, effective_rank,
tail_strength, noise, shuffle, coef, random_state
)
self._test_percent = test_percent
self._x = X
self._y = y
class SKDataNoTest(Dataset):
def __init__(self,**kwargs):
self.setup(**kwargs)
super(SKDataNoTest,self).__init__(None)
def setup(self):
raise NotImplementedError()
def load_train_test(self):
return self._x,self._y,self._x.copy(),self._y.copy()
class Iris(SKDataNoTest):
def setup(self):
iris = skdata.load_iris()
self._x = iris.data
self._y = iris.target
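# Minimal usage sketch (illustrative addition, not part of the original
# module): it assumes the Dataset base class invokes load_train_test()
# during construction, as the wrappers above imply.
if __name__ == '__main__':
    blobs = MakeBlobs(test_percent=0.3, n_samples=200, centers=3)
    iris = Iris()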
| crackhopper/TFS-toolbox | tfs/dataset/skdata.py | Python | mit | 3,278 |
from datetime import datetime, timedelta
| ourbest/sns_app | utils/scheduler.py | Python | lgpl-3.0 | 41 |
#!/usr/bin/env python
import requests
import cgi
## Extract qty if it exists
arguments = cgi.FieldStorage()
qty = None
if arguments is not None:
qty = arguments.getvalue("qty")
if qty == None:
qty = 1
url = "https://coinbase.com/api/v1/prices/buy?qty=" + str(qty)
r = requests.get(url)
jsonResponse = "{\"error\": \"There was an error fetching the bitcoin price\"}"
if r.status_code == 200:
jsonResponse = r.text
print "Content-Type: application/json"
print
print jsonResponse
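# Illustrative usage note (not part of the original script): deployed as a
# CGI endpoint, a request such as GET /bitcoinPrice.py?qty=2 proxies
# Coinbase's buy-price quote for 2 BTC and returns the JSON body; without a
# qty parameter the quantity defaults to 1.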
| ravikiranj/rkjanardhana-dot-com | bitcoinPrice.py | Python | bsd-2-clause | 512 |
# ANI2012A17_Logic_Python.py | Python programming with Maya | coding=utf-8
# Examples of logical control structures.
# variables
condition = True
number = 0
# 1. conditional branch without an alternative
# the statement block is executed only if the condition is true.
if condition:
print "branchement 1.2"
# 2. conditional branch with an alternative
if condition:
print "branchement 2.1"
else:
print "branchement 2.2"
# 3. conditional branch with an alternative, in compact (ternary) form
condition = False if condition == True else True
if condition:
print "branchement 3.1"
else:
print "branchement 3.2"
# 4. sequence of conditional branches
if condition:
print "branchement 4.1"
elif number == 0:
print "branchement 4.2"
else:
print "branchement 4.3"
# 5. multiple conditional branches
# Python has no switch statement,
# but it can be simulated with a dictionary of keys and values
switch = {
1: "branchement 5.1",
2: "branchement 5.2",
3: "branchement 5.3"}
print switch.get(number, "branchement 5.4")
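# 6. (illustrative addition, not part of the original exercise) the same
# dictionary technique can dispatch to functions instead of plain strings
def branch_one():
    return "branchement 6.1"
def branch_other():
    return "branchement 6.2"
dispatch = {1: branch_one}
print dispatch.get(number, branch_other)()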
| philvoyer/ANI2012A17 | Module07/EXE02/ANI2012A17_Logic_Python.py | Python | mit | 1,153 |
#!/usr/bin/env python
"""
Finds and prints coordinates of a required block, try with:
<worldpath> <x> <z> <range> <block>
./block_finder.py ./MyWorld 1 1 2 spawner
"""
import os, sys
# local module
try:
import nbt
except ImportError:
# nbt not in search path. Let's see if it can be found in the parent folder
extrasearchpath = os.path.realpath(os.path.join(__file__,os.pardir,os.pardir))
if not os.path.exists(os.path.join(extrasearchpath,'nbt')):
raise
sys.path.append(extrasearchpath)
import nbt
def main(world_folder, chunkx, chunkz, chunkrange, block):
world = nbt.world.WorldFolder(world_folder)
if not isinstance(world, nbt.world.AnvilWorldFolder):
print("%s is not an Anvil world" % (world_folder))
return 65 # EX_DATAERR
fromchunkx = chunkx - chunkrange
fromchunkz = chunkz - chunkrange
tochunkx = chunkx + chunkrange
tochunkz = chunkz + chunkrange
print("Preparing to scan chunk from %i:%i to chunk %i:%i for a %s" % (fromchunkx, fromchunkz, tochunkx, tochunkz, block))
try:
for chunkx in range(fromchunkx, tochunkx):
for chunkz in range(fromchunkz, tochunkz):
# print("Scanning chunk %i:%i" % (chunkx, chunkz))
chunk = world.get_chunk(chunkx, chunkz)
for z in range(0, 16):
for x in range(0, 16):
for height in range(0, 255):
b = chunk.get_block(x, height, z)
if b is not None and b == block:
blockx = (chunkx * 16 + x)
blockz = (chunkz * 16 + z)
print("%s found at %i:%i:%i" % (b, blockx, height, blockz))
except KeyboardInterrupt:
print('Keyboard interrupt!')
return 75 # EX_TEMPFAIL
return 0
def usage(message=None, appname=None):
if appname is None:
appname = os.path.basename(sys.argv[0])
print("Usage: %s WORLD_FOLDER CHUNK-X CHUNK-Z RANGE BLOCK" % appname)
if message:
print("%s: error: %s" % (appname, message))
if __name__ == '__main__':
if (len(sys.argv) != 6):
usage()
sys.exit(64) # EX_USAGE
world_folder = sys.argv[1]
try:
chunkx = int(sys.argv[2])
except ValueError:
usage('Chunk X-coordinate should be an integer')
sys.exit(64) # EX_USAGE
try:
chunkz = int(sys.argv[3])
except ValueError:
usage('Chunk Z-coordinate should be an integer')
sys.exit(64) # EX_USAGE
try:
chunkrange = int(sys.argv[4])
except ValueError:
usage('Chunk range should be an integer')
sys.exit(64) # EX_USAGE
try:
block = str(sys.argv[5])
except ValueError:
usage('Block should be a string')
sys.exit(64) # EX_USAGE
# clean path name, eliminate trailing slashes:
world_folder = os.path.normpath(world_folder)
if (not os.path.exists(world_folder)):
usage("No such folder as "+world_folder)
sys.exit(72) # EX_IOERR
sys.exit(main(world_folder, chunkx, chunkz, chunkrange, block))
| macfreek/NBT | examples/block_finder.py | Python | mit | 3,182 |
# Copyright (c) 2010 Arjan Scherpenisse
# See LICENSE for details.
# -*- test-case-name: sparked.test.test_events -*-
"""
Classes which define a generic event system.
"""
from twisted.python import log
from twisted.words.xish import utility
class EventDispatcher(utility.EventDispatcher):
"""
The sparked event dispatcher is simpler than the twisted version:
it does not use XPath arguments and its event prefix is always
empty.
This class exists to simplify the implementation of event
dispatchers in sparked without the syntactic sugar of xish's
EventDispatcher class.
It adds an extra feature: the possibility to give a parent
dispatcher using C{setEventParent} to which events will be
dispatched as well.
"""
parent = None
verbose = False
def __init__(self, eventprefix=""):
utility.EventDispatcher.__init__(self, eventprefix)
def dispatch(self, event, *arg, **kwarg):
"""
Dispatch the named event to all the callbacks.
"""
foundTarget = False
if self.verbose:
log.msg("%s --> %s: %s %s" % (repr(self), event, arg, kwarg))
self._dispatchDepth += 1
observers = self._eventObservers
priorities = observers.keys()
priorities.sort()
priorities.reverse()
emptyLists = []
for priority in priorities:
for query, callbacklist in observers[priority].iteritems():
if query == event:
callbacklist.callback(*arg, **kwarg)
foundTarget = True
if callbacklist.isEmpty():
emptyLists.append((priority, query))
for priority, query in emptyLists:
del observers[priority][query]
self._dispatchDepth -= 1
# If this is a dispatch within a dispatch, don't
# do anything with the updateQueue -- it needs to
# wait until we've backed all the way out of the stack
if self._dispatchDepth == 0:
# Deal with pending update operations
for f in self._updateQueue:
f()
self._updateQueue = []
if self.parent:
self.parent.dispatch(event, *arg, **kwarg)
return foundTarget
def setEventParent(self, p):
"""
Set a parent to which events will be dispatched as well.
"""
self.parent = p
def disownEventParent(self):
"""
Unparent this event dispatcher.
"""
self.parent = None
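if __name__ == '__main__':
    # Minimal usage sketch (illustrative addition, not part of the original
    # module): a child dispatcher notifies its own observers first, then
    # forwards the event to its parent.
    import sys
    log.startLogging(sys.stdout)
    root = EventDispatcher()
    child = EventDispatcher()
    child.setEventParent(root)
    root.addObserver('ping', lambda payload: log.msg('root saw %r' % (payload,)))
    child.addObserver('ping', lambda payload: log.msg('child saw %r' % (payload,)))
    child.dispatch('ping', 'hello')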
| arjan/sparked | sparked/events.py | Python | mit | 2,560 |
"""
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# this is the endpoint that my new version of this program will
# abuse with possible store ids. this is a much more reliable "darts at the wall"
# technique than the previous location-based one
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# from testing, I have confirmed certain "series" of store IDs
# 0000 series are all old stores in PA, NJ, MD, DE, and VA
# 5000 series are all stores in FL
# 8000 series are all new stores in PA, NJ, MD, DE, and VA
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# currently only tracking these gas types to keep a consistent csv schema.
# other types are not consistent across all wawas
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location):
"""
Breaks open the json for the gas prices
:param in_location: The Wawa location we are looking at (dict)
:return: The gas price info (dict)
"""
out_data = {}
try:
fuel_data = in_location["fuelTypes"]
for ft in fuel_data:
lowered = ft["description"].lower()
if lowered in GAS_TYPES:
out_data[lowered + "_price"] = ft["price"]
# no gas sold at this Wawa
except KeyError:
for gt in GAS_TYPES:
out_data[gt + "_price"] = ""
return out_data
def camel_to_underscore(in_string):
"""
Basic function that converts a camel-cased word to use underscores
:param in_string: The camel-cased string (str)
:return: The underscore'd string (str)
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
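# Illustrative examples of the two-pass conversion above (added note, not in
# the original source):
#   camel_to_underscore("storeNumber")  -> "store_number"
#   camel_to_underscore("hasATMInside") -> "has_atm_inside"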
def parse_amenities(in_location):
"""
Breaks open the json for the amenities offered at the Wawa location
:param in_location: The Wawa location (dict)
:return: The amenity info (dict)
"""
out_data = {}
for amenity, value in in_location["amenities"].items():
out_data["has_" + camel_to_underscore(amenity).lower()] = value
return out_data
def get_addresses(in_location):
"""
Parses info for the Wawa address and coordinates
:param in_location: The Wawa location (dict)
:return: The address and coordinate info (dict)
"""
friendly = in_location["addresses"][0]
physical = in_location["addresses"][1]
out_friendly = {
"address": friendly["address"],
"city": friendly["city"],
"state": friendly["state"],
"zip": friendly["zip"]
}
out_physical = {
"longitude": physical["loc"][1],
"latitude": physical["loc"][0],
}
return {"address": out_friendly, "coordinates": out_physical}
def get_wawa_data(limit=None):
"""
Hits the store number url endpoint to pull down Wawa locations and
parse each one's information. There is no published list of store
numbers, so we iterate through ranges of plausible store numbers
(narrowed down through testing), skipping any 404 errors (the
response returned for an invalid store id).
:param limit: A cap on the number of Wawa results returned (int) (optional)
:return: Parsed Wawa information (list<dict>)
"""
ssl._create_default_https_context = ssl._create_unverified_context
output = []
for i in POSSIBLE_STORE_NUMS:
response = requests.get(QUERY_URL, params={"storeNumber": i})
if response.status_code != 404:
location = response.json()
geographic_data = get_addresses(location)
address = geographic_data["address"]
coordinates = geographic_data["coordinates"]
gas_prices = parse_gas_prices(location)
amenities = parse_amenities(location)
this_location_output = {
"has_menu": location["hasMenu"],
"last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
"location_id": location["locationID"],
"open_24_hours": location["open24Hours"],
"regional_director": location["regionalDirector"],
"store_close": location["storeClose"],
"store_name": location["storeName"],
"store_number": location["storeNumber"],
"store_open": location["storeOpen"],
"telephone": location["telephone"]
}
this_location_output = {**this_location_output, **address}
this_location_output = {**this_location_output, **coordinates}
this_location_output = {**this_location_output, **gas_prices}
this_location_output = {**this_location_output, **amenities}
output.append(this_location_output)
if limit and len(output) == limit:
break
return output
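if __name__ == "__main__":
    # Minimal smoke test (illustrative addition, not part of the original
    # module); note this performs live HTTP requests against the Wawa API.
    for store in get_wawa_data(limit=2):
        print(store["store_name"], store["address"], store["city"])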
| cfh294/WawaGeoScraper | utils/scraping/__init__.py | Python | gpl-3.0 | 5,149 |
"""
Copyright (c) 2011-2015 Nathan Boley
This file is part of GRIT.
GRIT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
GRIT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GRIT. If not, see <http://www.gnu.org/licenses/>.
"""
import os, sys
import cPickle
import math
import random
import numpy
from scipy.special import gammaln, gamma, cbrt
import scipy.stats
from itertools import chain
import config
import grit.files.junctions
import grit.files.reads
from grit.lib.multiprocessing_utils import ProcessSafeOPStream
from grit.call_peaks_support_fns import calc_moments
from scipy.optimize import fmin_l_bfgs_b as minimize
""" Tuneable config options - should be set by caller
MIN_RD_CNT = 5
MIN_PEAK_SIZE = 5
MAX_PEAK_SIZE = 500
TRIM_FRACTION = 0.01
MAX_EXP_SUM_FRACTION = 0.05
MAX_EXP_MEAN_CVG_FRACTION = MAX_EXP_SUM_FRACTION/10
"""
VERBOSE = False
DEBUG_VERBOSE = False
MIN_EMPTY_REGION_SIZE = 1
BACKGROUND_FRACTION = 0.01
MIN_NOISE_FRAC = 0.01
SMOOTH_WIN_LEN = 10
SPLIT_TYPE = 'optimal' # 'random' other option
MAX_NUM_ITERATIONS = 25
N_REPS = 1
if SPLIT_TYPE == 'random': assert N_REPS > 1
def write_bedgraph_from_array(array, region, ofprefix):
"""
track name=CAGE.pan..plus type=bedGraph
chr4 89932 89933 4.00
chr4 89955 89956 2.00
chr4 89958 89959 2.00
"""
chrm = region['chrm']
start = region['start']
ofname = "%s.%s.bedgraph" % (
ofprefix, {'+': 'plus', '-': 'minus'}[region['strand']])
with open(ofname, 'w') as ofp:
print >> ofp, "track name=%s type=bedGraph" % ofname
for i, val in enumerate(array):
if val < 1e-6: continue
print >> ofp, "\t".join(
('chr' + chrm, str(start+i), str(start+i+1), "%.2f" % val))
return
def write_bedgraph(chrm, peaks, ofp):
"""
track name=CAGE.pan..plus type=bedGraph
chr4 89932 89933 4.00
chr4 89955 89956 2.00
chr4 89958 89959 2.00
"""
for start, stop, value in peaks:
ofp.write( "\t".join(
('chr'+chrm, str(start), str(stop+1), "%.2f" % value)) + "\n")
return
def build_false_signal(rnaseq_reads, signal_type, region):
    # note: the original signature omitted the region argument that this
    # function reads; it is added here so the code can actually run
    assert signal_type in ('5p', '3p')
# get the read start coverage
signal_cov = numpy.zeros(region['stop']-region['start']+1, dtype=float)
for rd1, rd2 in rnaseq_reads.iter_paired_reads(**region):
if signal_type == '3p':
pos = max(rd1.pos, rd1.aend, rd2.pos, rd2.aend)
else:
pos = min(rd1.pos, rd1.aend, rd2.pos, rd2.aend)
if pos < region['start'] or pos > region['stop']: continue
signal_cov[pos-region['start']] += 1
n_rnaseq_reads = signal_cov.sum()
# add the uniform background
signal_cov = (1-BACKGROUND_FRACTION)*signal_cov+(
    n_rnaseq_reads*BACKGROUND_FRACTION)/len(signal_cov)
return signal_cov
def build_control(rnaseq_reads, region, control_type, smooth_win_len=SMOOTH_WIN_LEN):
assert control_type in ('5p', '3p')
# get the read start coverage
cov = numpy.zeros(region['stop']-region['start']+1, dtype=float)
for rd1, rd2 in rnaseq_reads.iter_paired_reads(**region):
if control_type == '3p':
pos = max(rd1.pos, rd1.aend, rd2.pos, rd2.aend)
else:
pos = min(rd1.pos, rd1.aend, rd2.pos, rd2.aend)
if pos < region['start'] or pos > region['stop']: continue
cov[pos-region['start']] += 1
n_rnaseq_reads = cov.sum()
# add the uniform background
cov = (1-BACKGROUND_FRACTION)*cov+(
n_rnaseq_reads*BACKGROUND_FRACTION)/len(cov)
# get the region segment boundaries
region_tuple = (region['chrm'], region['strand'], region['start'], region['stop'])
jns = files.junctions.load_junctions_in_bam(
rnaseq_reads, [region_tuple,] )[(region['chrm'], region['strand'])]
bndries = set((region['start']-region['start'], region['stop']-region['start']+1))
for (start, stop), cnt, entropy in jns:
bndries.add(start-region['start'])
bndries.add(stop-region['start'])
bndries = sorted(bndries)
# smooth the signal in each segment
min_signal = n_rnaseq_reads*BACKGROUND_FRACTION/len(cov)
window = numpy.ones(smooth_win_len, dtype=float)/smooth_win_len
for start, stop in zip(bndries[:-1], bndries[1:]):
segment_signal = cov[start:stop]
if stop - start <= smooth_win_len:
cov[start:stop] = segment_signal.mean()
else:
cov[start:stop] = numpy.convolve(
window,segment_signal,mode='same')
#cov[cov < min_signal] = min_signal
return (cov + 1e-12)/(cov.sum() + 1e-12*len(cov))
def build_control_in_gene_regions(
gene, rnaseq_reads, control_type, smooth_win_len=SMOOTH_WIN_LEN):
assert control_type in ('5p', '3p')
# get the read start coverage
cov = numpy.zeros(gene.stop-gene.start+1, dtype=float)
window = numpy.ones(smooth_win_len, dtype=float)/smooth_win_len
for x in gene.regions:
seg_cov = rnaseq_reads.build_read_coverage_array(
gene.chrm, gene.strand, x.start, x.stop )
if len(seg_cov) <= smooth_win_len:
seg_cov = seg_cov.mean()
else:
seg_cov = numpy.convolve(
window, seg_cov, mode='same')
cov[x.start-gene.start:x.stop-gene.start+1] = seg_cov
return (cov + 1e-12)/(cov.sum() + 1e-12*len(cov))
def build_control_in_gene(gene, paired_rnaseq_reads, bndries,
control_type, smooth_win_len=SMOOTH_WIN_LEN):
assert control_type in ('5p', '3p')
# get the read start coverage
cov = numpy.zeros(gene.stop-gene.start+1, dtype=float)
for rd_key, mappings in paired_rnaseq_reads:
for mapping in mappings:
poss = chain(chain(*mapping[4].cov_regions),
chain(*mapping[4].cov_regions))
if control_type == '3p':
pos = max(poss)
else:
pos = min(poss)
if pos < gene.start or pos > gene.stop: continue
cov[pos-gene.start] += mapping[-1]
n_rnaseq_reads = len(paired_rnaseq_reads)
# add the uniform background
cov = (1-BACKGROUND_FRACTION)*cov+(
n_rnaseq_reads*BACKGROUND_FRACTION)/len(cov)
# smooth the signal in each segment
min_signal = n_rnaseq_reads*BACKGROUND_FRACTION/len(cov)
window = numpy.ones(smooth_win_len, dtype=float)/smooth_win_len
for start, stop in zip(bndries[:-1], bndries[1:]):
segment_signal = cov[start-gene.start:stop-gene.start+1]
region_len = stop - start + 1
region_cnt = segment_signal.sum()
if ( region_cnt/region_len < 1./smooth_win_len
or region_len <= smooth_win_len ):
cov[start-gene.start:stop-gene.start+1] = region_cnt/region_len
else:
cov[start-gene.start:stop-gene.start+1] = numpy.convolve(
window,segment_signal,mode='same')
#cov[cov < min_signal] = min_signal
return (cov + 1e-12)/(cov.sum() + 1e-12*len(cov))
class TestSignificance(object):
def __init__(self, signal_cov, control_cov, noise_frac, min_peak_size):
self.noise_n = int(noise_frac*sum(signal_cov)) + 1
self.signal_n = sum(signal_cov)
self.min_peak_size = min_peak_size
#### initialize the array that we will use to pick
#### the split base(s)
self.split_statistic = signal_cov
x = numpy.diff( numpy.asarray(
signal_cov >= 1e-6, dtype=int ) )
stops = numpy.nonzero(x==1)[0].tolist()
if signal_cov[-1] < 1e-6: stops.append(len(x))
starts = (numpy.nonzero(x==-1)[0]+1).tolist()
if signal_cov[0] < 1e-6: starts.insert(0, 0)
self.zero_intervals = [
(start, stop) for start, stop in zip(starts, stops)
if stop - start + 1 >= MIN_EMPTY_REGION_SIZE ]
#### initialize data to test for region significance
# initialize the null data
null_means = [0.,]
null_vars = [0.,]
for i, p in enumerate(control_cov):
mean, var = calc_moments(p, self.noise_n)
null_means.append(mean)
null_vars.append(var)
self.null_means_cumsum = numpy.array(null_means).cumsum()
self.null_variances_cumsum = numpy.array(null_vars).cumsum()
# initialize the signal test statistic
lhds = ( signal_cov*numpy.log(control_cov)
- gammaln(1+signal_cov) )
self.signal_lhd_cumsum = numpy.hstack((
numpy.zeros(1), lhds.cumsum()))
self.signal_cnts_cumsum = numpy.hstack((
numpy.zeros(1), signal_cov.cumsum()))
def __call__(self, start, stop, alpha):
# if there are more reads in this region than noise reads,
# then this region must include some signal
sig_cnt = (
self.signal_cnts_cumsum[stop]
- self.signal_cnts_cumsum[start] )
if sig_cnt > self.noise_n: return True
mean = -(self.null_means_cumsum[stop]
- self.null_means_cumsum[start] + 1)
variance = ( self.null_variances_cumsum[stop]
- self.null_variances_cumsum[start] + 1)
scale = variance/mean
shape = mean/scale
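# added explanatory note: this is moment matching -- a gamma(shape, scale)
# distribution has mean shape*scale and variance shape*scale**2, so
# scale = variance/mean and shape = mean/scale recover a gamma with the
# null statistic's first two moments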
dist = scipy.stats.gamma(shape, scale=scale)
critical_value = -dist.isf(alpha)
# calculate the value of the observed likelihood
obs_lhd = ( self.signal_lhd_cumsum[stop]
- self.signal_lhd_cumsum[start] )
return obs_lhd < critical_value
def find_split_bases(self, r_start, r_stop):
"""Returns a closed,open interval of bases to split.
"""
r_start += self.min_peak_size
r_stop -= self.min_peak_size
assert r_stop >= r_start
if SPLIT_TYPE == 'random':
rv = random.randint(r_start, r_stop)
return rv, rv
assert SPLIT_TYPE == 'optimal'
# find the largest zero interval
split_interval = None
for start, stop in self.zero_intervals:
if stop < r_start: continue
if start > r_stop: break
start = max(start, r_start)
stop = min(stop, r_stop)
if ( split_interval == None or
stop-start+1 > split_interval[1] - split_interval[0] ):
split_interval = (start, stop)
# if we found one, then use it. Otherwise, find the location with
# the minimum signal
if split_interval != None:
#diff = split_interval[1] - split_interval[0]
#return split_interval[0]+diff/2, split_interval[0]+diff/2
return split_interval[0], split_interval[1]+1
# find the bases that are the most below the mean
min_val = self.split_statistic[r_start:r_stop+1].min()
# find the indices of the minimum value
min_indices = (
self.split_statistic[r_start:r_stop+1] == min_val).nonzero()
#rv = random.choice(min_indices[0]) + r_start
rv = min_indices[0][0] + r_start
return rv, rv
def find_noise_regions(signal_cov, control_cov,
noise_frac, alpha, min_peak_size):
alpha = alpha/(2*len(signal_cov))
is_significant = TestSignificance(
signal_cov, control_cov, noise_frac, min_peak_size)
noise_regions = []
if signal_cov.sum() == 0:
return [(0, len(signal_cov)),]
# initialize the first region to split
# trim 0 count bases from the edges of the signal track
start, stop = 0, len(signal_cov)
for i, cnt in enumerate(signal_cov):
if cnt > 0: break
start = i
if start > 0: noise_regions.append((0, start))
for i in reversed(xrange(len(signal_cov))):
if signal_cov[i] > 0: break
stop = i
if stop < len(signal_cov): noise_regions.append((stop,len(signal_cov)))
regions_to_split = [((start, stop), 1)]
# if the full region isn't significant, then we are done
if not is_significant(*regions_to_split[0][0], alpha=alpha):
return noise_regions + [regions_to_split[0][0],]
while len(regions_to_split) > 0:
# get the region to split - we know that this is significant
# XXX use a better data structure
(start, stop), level = regions_to_split.pop(0)
# if this region is too small, then it's already significant
# and so there is nothing to do
if stop - start < 2*min_peak_size: continue
# build the sub regions, and test them for significance
left_bnd, right_bnd = is_significant.find_split_bases(start, stop)
# add the split bases to the noise set
if right_bnd > left_bnd:
noise_regions.append((left_bnd, right_bnd))
r1, r2 = [(start, left_bnd), (right_bnd, stop)]
r1_sig, r2_sig = [
is_significant(*r1, alpha=alpha),
is_significant(*r2, alpha=alpha) ]
# if neither sub region is significant, (and we know the parent region
# was significant) then we are done
if not r1_sig and not r2_sig:
continue
# add the subregions to the appropriate locations
if r1_sig:
    regions_to_split.append((r1, level+1))
else:
    noise_regions.append(r1)
if r2_sig:
    regions_to_split.append((r2, level+1))
else:
    noise_regions.append(r2)
return sorted(noise_regions)
def estimate_noise_frac(noise_regions, signal_cov, control_cov, min_noise_frac):
noise_cnt = sum(signal_cov[start:stop].sum()
for start, stop in noise_regions )
control_cnt = sum(control_cov[start:stop].sum()
for start, stop in noise_regions )
assert control_cnt <= 1.0+1e-6
expected_noise_cnt = (1./control_cnt)*noise_cnt
signal_cnt = signal_cov.sum()
# because this is a MOM estimate, it can lay out of the domain.
# however, this should only occur in insignificant genes
rv = min(1., expected_noise_cnt/(signal_cnt+1e-6))
return max(min_noise_frac, rv)
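# Worked example (added note, not in the original source): if the noise
# regions hold 50 signal reads and 25% of the control mass
# (control_cnt = 0.25), the method-of-moments estimate scales them up to
# (1/0.25)*50 = 200 expected noise reads; with 1000 signal reads total the
# returned noise fraction is about 0.2.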
def update_control_cov_for_five_prime_bias(
noise_regions, noise_frac,
signal_cov, control_cov, reads_type):
# disable the correction
return (0.,1.), control_cov
positions = []
Ys = []
ps = []
max_pos = float(len(control_cov))
for start, stop in sorted(noise_regions):
for pos in xrange(start, stop):
positions.append(pos)
Ys.append(signal_cov[pos])
ps.append(control_cov[pos])
positions = numpy.array(positions, dtype=float)
Ys = numpy.array(Ys, dtype=float)
ps = numpy.array(ps, dtype=float)
def calc_new_ps(args, positions, ps):
alpha, power = args
if reads_type == '5p':
weights = (1 - positions/(max_pos+1))**power
elif reads_type == '3p':
weights = (positions/(max_pos+1))**power
else:
assert False
new_ps = (weights/weights.mean())*alpha*ps + (1-alpha)*ps
return new_ps/new_ps.sum()
def calc_lhd_for_reg_coef(args):
new_ps = calc_new_ps(args, positions, ps)
res = -(Ys*numpy.log(new_ps)).sum()
return res
res = minimize(
calc_lhd_for_reg_coef, x0=(0.1,1),
approx_grad=True, bounds=[(1e-6, 1-1e-6),(1,2)])
reg_coef = res[0].tolist()
return reg_coef, calc_new_ps(reg_coef, numpy.arange(max_pos), control_cov)
def merge_adjacent_intervals(
intervals, max_abs_merge_distance, max_merge_fraction, max_peak_size):
if len(intervals) == 0: return []
intervals.sort()
merged_intervals = [list(intervals[0]),]
prev_stop = merged_intervals[-1][1]
for start, stop in intervals[1:]:
max_merge_distance = max(
max_abs_merge_distance,
max_merge_fraction*(stop-start),
max_merge_fraction*(merged_intervals[-1][1]-merged_intervals[-1][0]))
if ( start - max_merge_distance - 1 <= prev_stop
and stop - start + 1 < max_peak_size ):
merged_intervals[-1][1] = stop
else:
merged_intervals.append([start, stop])
prev_stop = stop
return merged_intervals
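# Worked example for merge_adjacent_intervals (added note, not in the
# original source): with max_abs_merge_distance=10, max_merge_fraction=0 and
# max_peak_size=500, the intervals [(0, 20), (25, 40), (400, 420)] collapse
# to [[0, 40], [400, 420]]: 25 - 10 - 1 <= 20 merges the first pair, while
# 400 - 10 - 1 > 40 keeps the last interval separate.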
def estimate_read_and_control_cov_in_gene(
gene, signal_reads, reads_type,
rnaseq_reads, alpha=0.01):
assert reads_type in ('promoter', 'polya')
reads_type = '5p' if reads_type == 'promoter' else '3p'
if gene.strand == '-':
reads_type = {'3p':'5p', '5p':'3p'}[reads_type]
signal_cov = gene.find_coverage(signal_reads)
if DEBUG_VERBOSE:
config.log_statement("Finished building signal coverage array")
#signal_cov = build_false_signal(rnaseq_reads, '5p')
control_cov = build_control_in_gene_regions(
gene, rnaseq_reads, reads_type, SMOOTH_WIN_LEN)
if DEBUG_VERBOSE:
config.log_statement("Finished building control coverage array")
return signal_cov, control_cov
def call_peaks( signal_cov, original_control_cov, reads_type,
gene,
alpha, min_noise_frac,
min_merge_size, min_rel_merge_size,
min_rd_cnt,
trim_fraction,
min_peak_size, max_peak_size,
max_exp_sum_fraction, max_exp_mean_cvg_fraction):
signal = numpy.ones(len(signal_cov))
for k in xrange(N_REPS):
noise_frac = 1.0
noise_regions = [(0, len(signal)),]
reg_coef, control_cov = \
update_control_cov_for_five_prime_bias(
noise_regions, noise_frac,
signal_cov, original_control_cov, reads_type)
for i in xrange(MAX_NUM_ITERATIONS):
if DEBUG_VERBOSE:
region = {'chrm': gene.chrm, 'strand': gene.strand,
'start': gene.start, 'stop': gene.stop}
write_bedgraph_from_array(
1000*control_cov, region, "control.%i"%i)
write_bedgraph_from_array(
signal_cov, region, "signal.%i"%i)
config.log_statement(
"Iter %i: Noise Frac %.2f%%\tReg Coef: %s" % (
i+1, noise_frac*100, reg_coef))
noise_regions = find_noise_regions(
signal_cov, control_cov,
noise_frac, alpha=alpha, min_peak_size=min_peak_size )
new_noise_frac = estimate_noise_frac(
noise_regions, signal_cov, control_cov, min_noise_frac)
new_reg_coef, control_cov = \
update_control_cov_for_five_prime_bias(
noise_regions, noise_frac,
signal_cov, original_control_cov, reads_type)
if noise_frac - new_noise_frac <= 1e-3 \
and abs(reg_coef[0] - new_reg_coef[0]) < 1e-3 \
and abs(reg_coef[1] - new_reg_coef[1]) < 1e-3:
break
else:
noise_frac = new_noise_frac
reg_coef = new_reg_coef
for start, stop in noise_regions:
signal[start:stop] -= 1./N_REPS
# build a list of inclusive peak starts and stops
peaks = []
nonzero_bases = (signal>1e-6).nonzero()[0].tolist()
if len(nonzero_bases) == 0: return peaks
curr_start = nonzero_bases.pop(0)
curr_stop = curr_start
for base in nonzero_bases:
if base == curr_stop+1:
curr_stop += 1
else:
peaks.append((curr_start, curr_stop))
curr_start, curr_stop = base, base
peaks.append((curr_start, curr_stop))
while True:
new_peaks = merge_adjacent_intervals(
peaks, min_merge_size, min_rel_merge_size, max_peak_size)
if len(new_peaks) == len(peaks):
peaks = new_peaks
break
else:
peaks = new_peaks
# trim peaks
new_peaks = []
for start, stop in peaks:
assert stop >= start
cov_region = signal_cov[start:stop+1]
total_cov = cov_region.sum()
cov_cumsum = cov_region.cumsum()-cov_region[0]
try:
    trim_start = numpy.flatnonzero(
        cov_cumsum < int(trim_fraction*total_cov)).max()
except ValueError:  # no base falls below the trim threshold
    trim_start = 0
try:
    trim_stop = numpy.flatnonzero(
        cov_cumsum > (1.0-trim_fraction)*total_cov).min()
except ValueError:  # no base rises above the trim threshold
    trim_stop = len(cov_region)-1
while trim_start < len(cov_region)-1 and cov_region[trim_start] == 0:
trim_start += 1
while trim_stop > trim_start and cov_region[trim_stop] == 0:
trim_stop -= 1
new_peaks.append((trim_start+start,
trim_stop+start,
cov_region[trim_start:trim_stop+1].sum()))
# filter peaks
exp_filtered_peaks = []
max_peak_cnt = float(max(cnt for start, stop, cnt in new_peaks))
max_peak_mean_cnt = float(max(cnt/float(stop-start+1)
for start, stop, cnt in new_peaks))
for start, stop, cnt in new_peaks:
length = stop - start + 1
if (cnt >= min_rd_cnt
and length >= min_peak_size
and length <= max_peak_size
and cnt/max_peak_cnt > max_exp_sum_fraction
and (cnt/float(length))/max_peak_mean_cnt
> max_exp_mean_cvg_fraction ):
exp_filtered_peaks.append((start, stop, cnt))
return exp_filtered_peaks
| nboley/grit | grit/peaks.py | Python | gpl-3.0 | 22,194 |
from flask import Blueprint, request
a40323148_1 = Blueprint('a40323148_1', __name__, url_prefix='/a40323148_1', template_folder='templates')
head_str = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
'''
tail_str = '''
</script>
</body>
</html>
'''
chain_str = '''
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# draw the axes
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
deg = math.pi/180
# wrap the chain-link outline drawing in a class
class chain():
# the outline shape is kept as a class variable
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
#chamber = "M 0, 0 L 0, -20 z"
cgoChamber = window.svgToCgoSVG(chamber)
def __init__(self, fillcolor="green", border=True, strokecolor= "tan", linewidth=2, scale=1):
self.fillcolor = fillcolor
self.border = border
self.strokecolor = strokecolor
self.linewidth = linewidth
self.scale = scale
# draw a link defined by its start and end points
def basic(self, x1, y1, x2, y2):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
# note: cgoChamber is a member variable
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
# hole marks the origin position
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
# duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
# the link outline hangs straight down from the origin, so rotate 90 degrees, then add the atan2 angle
basic1.rotate(math.atan2(y2-y1, x2-x1)/deg+90)
# enlarge by a factor of scale
cgo.render(basic1, x1, y1, self.scale, 0)
# draw a link from its start point and rotation angle, using the stored color, border and linewidth members
def basic_rot(self, x1, y1, rot, v=False):
# if v is True this is a virtual link and is not rendered
self.x1 = x1
self.y1 = y1
self.rot = rot
self.v = v
# note: cgoChamber is a member variable
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
# hole marks the origin position
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
# compute x2 and y2 from the rotation angle
x2 = x1 + 20*math.cos(rot*deg)*self.scale
y2 = y1 + 20*math.sin(rot*deg)*self.scale
# duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
# the link outline hangs straight down from the origin, so rotate 90 degrees plus the requested angle
basic1.rotate(rot+90)
# enlarge by a factor of scale
if v == False:
cgo.render(basic1, x1, y1, self.scale, 0)
return x2, y2
'''
# returns the drawing code for the letter A
def a(x, y, scale=1, color="green"):
outstring = '''
# instantiate the chain class and bind it to the mychain variable
mychain = chain(scale='''+str(scale)+''', fillcolor="'''+str(color)+'''")
# draw A
# two vertical links on the left
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
# two links on the left slanted edge
x3, y3 = mychain.basic_rot(x2, y2, 80)
x4, y4 = mychain.basic_rot(x3, y3, 71)
# the horizontal link at the top
x5, y5 = mychain.basic_rot(x4, y4, 0)
# two links on the right slanted edge
x6, y6 = mychain.basic_rot(x5, y5, -71)
x7, y7 = mychain.basic_rot(x6, y6, -80)
# two vertical links on the right
x8, y8 = mychain.basic_rot(x7, y7, -90)
x9, y9 = mychain.basic_rot(x8, y8, -90)
# two horizontal links across the middle
x10, y10 = mychain.basic_rot(x8, y8, -180)
mychain.basic(x10, y10, x1, y1)
'''
return outstring
# returns the drawing code for the letter B
def b(x, y):
outstring = '''
# instantiate the chain class and bind it to the mychain variable
mychain = chain()
# draw B
# four vertical links on the left
# each character is spaced 65 pixels apart
#x1, y1 = mychain.basic_rot(0+ 65, 0, 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
x3, y3 = mychain.basic_rot(x2, y2, 90)
x4, y4 = mychain.basic_rot(x3, y3, 90)
# one horizontal link at the top
x5, y5 = mychain.basic_rot(x4, y4, 0)
# slant right at -30 degrees
x6, y6 = mychain.basic_rot(x5, y5, -30)
# upper-right link pointing straight down
x7, y7 = mychain.basic_rot(x6, y6, -90)
# slant right at 240 degrees
x8, y8 = mychain.basic_rot(x7, y7, 210)
# horizontal link across the middle
mychain.basic(x8, y8, x2, y2)
# lower-right slant at -30 degrees
x10, y10 = mychain.basic_rot(x8, y8, -30)
# lower-right link pointing straight down
x11, y11 = mychain.basic_rot(x10, y10, -90)
# lower-right slant at 240 degrees
x12, y12 = mychain.basic_rot(x11, y11, 210)
# horizontal link closing back to the start point
mychain.basic(x12,y12, '''+str(x)+","+str(y)+''')
'''
return outstring
# returns the drawing code for the letter C
def c(x, y):
outstring = '''
# instantiate the chain class and bind it to the mychain variable
mychain = chain()
# upper half
# vertical start at the middle left; the circle center sits at the midpoint of the segment, then shifts up in y by two link-pitch units
#x1, y1 = mychain.basic_rot(0+65*2, -10+10+20*math.sin(80*deg)+20*math.sin(30*deg), 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+'''-10+10+20*math.sin(80*deg)+20*math.sin(30*deg), 90)
# turn 80 degrees at the top
x2, y2 = mychain.basic_rot(x1, y1, 80)
# turn 30 degrees at the top
x3, y3 = mychain.basic_rot(x2, y2, 30)
# horizontal link at the top
x4, y4 = mychain.basic_rot(x3, y3, 0)
# lower half, starting from the start point at -80 degrees
#x5, y5 = mychain.basic_rot(0+65*2, -10+10+20*math.sin(80*deg)+20*math.sin(30*deg), -80)
x5, y5 = mychain.basic_rot('''+str(x)+","+str(y)+'''-10+10+20*math.sin(80*deg)+20*math.sin(30*deg), -80)
# slant down at -30 degrees
x6, y6 = mychain.basic_rot(x5, y5, -30)
# horizontal link at the bottom
x7, y7 = mychain.basic_rot(x6, y6, -0)
'''
return outstring
# returns the drawing code for the letter D
def d(x, y):
outstring = '''
# instantiate the chain class and bind it to the mychain variable
mychain = chain()
# four vertical links on the left
#x1, y1 = mychain.basic_rot(0+65*3, 0, 90)
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 90)
x2, y2 = mychain.basic_rot(x1, y1, 90)
x3, y3 = mychain.basic_rot(x2, y2, 90)
x4, y4 = mychain.basic_rot(x3, y3, 90)
# one horizontal link at the top
x5, y5 = mychain.basic_rot(x4, y4, 0)
# slant right at -40 degrees
x6, y6 = mychain.basic_rot(x5, y5, -40)
x7, y7 = mychain.basic_rot(x6, y6, -60)
# middle-right link pointing straight down
x8, y8 = mychain.basic_rot(x7, y7, -90)
# -120 degrees
x9, y9 = mychain.basic_rot(x8, y8, -120)
# -140 degrees
x10, y10 = mychain.basic_rot(x9, y9, -140)
# horizontal link closing back to the origin
#mychain.basic(x10, y10, 0+65*3, 0, color="red")
mychain.basic(x10, y10, '''+str(x)+","+str(y)+''')
'''
return outstring
def circle(x, y):
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+''', 50)
'''
for i in range(2, 10):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*40)+") \n"
return outstring
def circle1(x, y, degree=10):
# 20 is the pitch between the two holes of a link
# the chain sits on a circle of radius 20/2/math.asin(degree*math.pi/180/2)
# degree = math.asin(20/2/radius)*180/math.pi
#degree = 10
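# worked example (added note): with degree=10 each 20-px link subtends 10
# degrees, so the chain sits on a circle of radius roughly
# 10/math.sin(math.radians(5)) ~= 114.7 px, drawn with 360/10 = 36 links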
first_degree = 90 - degree
repeat = 360 / degree
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
return outstring
def circle2(x, y, degree=10):
# 20 is the pitch between the two holes of a link
# the chain sits on a circle of radius 20/2/math.asin(degree*math.pi/180/2)
# degree = math.asin(20/2/radius)*180/math.pi
#degree = 10
first_degree = 90 - degree
repeat = 360 / degree
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
return outstring
def twocircle(x, y):
# 20 is the pitch between the two holes of a link
# the chain sits on a circle of radius 20/2/math.asin(degree*math.pi/180/2)
# degree = math.asin(20/2/radius)*180/math.pi
x = 50
y = 0
degree = 12
# 78, 66, 54, 42, 30, 18, 6 degrees
# some links must have their coordinates computed without being rendered
first_degree = 90 - degree
repeat = 360 / degree
# the first link is also a virtual link
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''', True)
#x1, y1 = mychain.basic_rot('''+str(x)+","+str(y)+", "+str(first_degree)+''')
'''
# leave one extra virtual link at both the top and the bottom here, so that (x7, y7) and (x22, y22) can be connected at the end
for i in range(2, int(repeat)+1):
#if i < 7 or i > 23:
if i <= 7 or i >= 23:
# virtual link
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+", True) \n"
#outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
else:
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+", 90-"+str(i*degree)+") \n"
p = -150
k = 0
degree = 20
# 70, 50, 30, 10
# 從 i=5 開始, 就是 virautl chain
first_degree = 90 - degree
repeat = 360 / degree
# the first link is not virtual
outstring += '''
#mychain = chain()
p1, k1 = mychain.basic_rot('''+str(p)+","+str(k)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
if i >= 5 and i <= 13:
# virtual link
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+", True) \n"
#outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+") \n"
else:
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+", 90-"+str(i*degree)+") \n"
# upper connecting straight run
# starting from p4, k4
first_degree = 10
repeat = 11
outstring += '''
m1, n1 = mychain.basic_rot(p4, k4, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "m"+str(i)+", n"+str(i)+"=mychain.basic_rot(m"+str(i-1)+", n"+str(i-1)+", "+str(first_degree)+")\n"
# lower connecting straight run
# starting from p13, k13
first_degree = -10
repeat = 11
outstring += '''
r1, s1 = mychain.basic_rot(p13, k13, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "r"+str(i)+", s"+str(i)+"=mychain.basic_rot(r"+str(i-1)+", s"+str(i-1)+", "+str(first_degree)+")\n"
# the upper run joins x7, y7 on the right and m11, n11 on the left
outstring += "mychain.basic(x7, y7, m11, n11)\n"
# the lower run joins x22, y22 on the right and r11, s11 on the left
outstring += "mychain.basic(x22, y22, r11, s11)\n"
return outstring
def eighteenthirty(x, y):
'''
Coordinate points of the two external tangent lines, obtained from the graphical and the symbolic solutions:
(-203.592946177111, 0.0), (0.0, 0.0), (-214.364148466539, 56.5714145924675), (-17.8936874260919, 93.9794075692901)
(-203.592946177111, 0.0), (0.0, 0.0), (-214.364148466539, -56.5714145924675), (-17.8936874260919, -93.9794075692901)
Left key link starts at (-233.06, 49.48), angle 20.78, circle center (-203.593, 0.0)
Right key link starts at (-17.89, 93.9), angle 4.78, circle center (0, 0)
'''
# 20 is the pitch between the two holes of a link
# the chain sits on a circle of radius 20/2/math.asin(degree*math.pi/180/2)
# degree = math.asin(20/2/radius)*180/math.pi
x = 50
y = 0
degree = 20
first_degree = 20.78
startx = -233.06+100
starty = 49.48
repeat = 360 / degree
# first draw the key link on the left
outstring = '''
mychain = chain()
x1, y1 = mychain.basic_rot('''+str(startx)+","+str(starty)+", "+str(first_degree)+''')
'''
# then render the non-virtual links on the left
for i in range(2, int(repeat)+1):
if i >=2 and i <=11:
# virautl chain
#outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+", True) \n"
else:
outstring += "x"+str(i)+", y"+str(i)+"=mychain.basic_rot(x"+str(i-1)+", y"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
# then handle the non-virtual links on the right
# first draw the key link on the right
p = -17.89+100
k = 93.98
degree = 12
first_degree = 4.78
repeat = 360 / degree
# the first link is not virtual
outstring += '''
#mychain = chain()
p1, k1 = mychain.basic_rot('''+str(p)+","+str(k)+", "+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
if i >=18:
# virtual link
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+", True) \n"
#outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
else:
outstring += "p"+str(i)+", k"+str(i)+"=mychain.basic_rot(p"+str(i-1)+", k"+str(i-1)+","+str(first_degree+degree-i*degree)+") \n"
# upper connecting straight run
# starting from x1, y1
first_degree = 10.78
repeat = 10
outstring += '''
m1, n1 = mychain.basic_rot(x1, y1, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "m"+str(i)+", n"+str(i)+"=mychain.basic_rot(m"+str(i-1)+", n"+str(i-1)+", "+str(first_degree)+")\n"
# lower connecting straight run
# starting from x11, y11
first_degree = -10.78
repeat = 10
outstring += '''
r1, s1 = mychain.basic_rot(x11, y11, '''+str(first_degree)+''')
'''
for i in range(2, int(repeat)+1):
outstring += "r"+str(i)+", s"+str(i)+"=mychain.basic_rot(r"+str(i-1)+", s"+str(i-1)+", "+str(first_degree)+")\n"
return outstring
@a40323148_1.route('/a')
def draw_a():
return head_str + chain_str + a(0, 0) + tail_str
@a40323148_1.route('/b')
def draw_b():
# characters are 65 pixels apart horizontally and 110 pixels apart vertically
return head_str + chain_str + b(0+65, 0) + tail_str
@a40323148_1.route('/c')
def draw_c():
# characters are 65 pixels apart horizontally
return head_str + chain_str + c(0+65*2, 0) + tail_str
@a40323148_1.route('/d')
def draw_d():
return head_str + chain_str + d(0+65*3, 0) + tail_str
@a40323148_1.route('/ab')
def draw_ab():
#return head_str + chain_str + a(0, 0) + b(0+65, 0) + tail_str
return head_str + chain_str + a(0, 0) + b(0, 0-110) + tail_str
@a40323148_1.route('/ac')
def draw_ac():
return head_str + chain_str + a(0, 0) + c(0+65, 0) + tail_str
@a40323148_1.route('/bc')
def draw_bc():
return head_str + chain_str + b(0, 0) + c(0+65, 0) + tail_str
@a40323148_1.route('/abc')
def draw_abc():
return head_str + chain_str + a(0, 0) + b(0+65, 0) + c(0+65*2, 0) + tail_str
@a40323148_1.route('/aaaa')
def draw_aaaa():
outstring = head_str + chain_str
scale = 2
for i in range(20):
scale = scale*0.9
outstring += a(0+10*i, 0, scale=scale)
return outstring + tail_str
#return head_str + chain_str + a(0, 0, scale=1) + a(0+65, 0, scale=0.8, color="red") + a(0+65*2, 0, scale=0.6) + a(0+65*3, 0, scale=0.4, color="red") + tail_str
@a40323148_1.route('/badc')
def draw_badc():
return head_str + chain_str + b(0, 0) + a(0+65, 0) + d(0+65*2, 0) + c(0+65*3, 0) + tail_str
@a40323148_1.route('/abcd')
def draw_abcd():
#return head_str + chain_str + a(0, 0) + b(0+65, 0) + c(0+65*2, 0) + d(0+65*3, 0) + tail_str
return head_str + chain_str + a(0, 110) + b(0, 110-110) + c(0, 110-110*2) + d(0, 110-110*3) + tail_str
@a40323148_1.route('/circle')
def drawcircle():
return head_str + chain_str + circle(0, 0) + tail_str
@a40323148_1.route('/circle1/<degree>', defaults={'x': 0, 'y': 0})
@a40323148_1.route('/circle1/<x>/<degree>', defaults={'y': 0})
@a40323148_1.route('/circle1/<x>/<y>/<degree>')
#@a40323148_1.route('/circle1/<int:x>/<int:y>/<int:degree>')
def drawcircle1(x,y,degree):
return head_str + chain_str + circle1(int(x), int(y), int(degree)) + tail_str
@a40323148_1.route('/circle2/<degree>', defaults={'x': 0, 'y': 0})
@a40323148_1.route('/circle2/<x>/<degree>', defaults={'y': 0})
@a40323148_1.route('/circle2/<x>/<y>/<degree>')
#@a40323148_1.route('/circle2/<int:x>/<int:y>/<int:degree>')
def drawcircle2(x,y,degree):
return head_str + chain_str + circle2(int(x), int(y), int(degree)) + tail_str
@a40323148_1.route('/twocircle/<x>/<y>')
@a40323148_1.route('/twocircle', defaults={'x':0, 'y':0})
def drawtwocircle(x,y):
return head_str + chain_str + twocircle(int(x), int(y)) + tail_str
@a40323148_1.route('/eighteenthirty/<x>/<y>')
@a40323148_1.route('/eighteenthirty', defaults={'x':0, 'y':0})
def draweithteenthirdy(x,y):
return head_str + chain_str + eighteenthirty(int(x), int(y)) + tail_str
@a40323148_1.route('/snap')
# http://svg.dabbles.info/snaptut-base
def snap():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 snap 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# use window and JSConstructor to pull the contents of the Snap object into the Brython snap object
snap = JSConstructor(window.Snap)
s = snap("#svgout")
# set the id attribute while creating each object
r = s.rect(10,10,100,100).attr({'id': 'rect'})
c = s.circle(100,100,50).attr({'id': 'circle'})
r.attr('fill', 'red')
c.attr({ 'fill': 'blue', 'stroke': 'black', 'strokeWidth': 10 })
r.attr({ 'stroke': '#123456', 'strokeWidth': 20 })
s.text(180,100, '點按一下圖形').attr({'fill' : 'blue', 'stroke': 'blue', 'stroke-width': 0.2 })
g = s.group().attr({'id': 'tux'})
def hoverover(ev):
g.animate({'transform': 's1.5r45,t180,20'}, 1000, window.mina.bounce)
def hoverout(ev):
g.animate({'transform': 's1r0,t180,20'}, 1000, window.mina.bounce)
# callback function
def onSVGLoaded(data):
#s.append(data)
g.append(data)
#g.hover(hoverover, hoverout )
g.text(300,100, '拿滑鼠指向我')
# load the svg file with window.Snap.load
tux = window.Snap.load("/static/Dreaming_tux.svg", onSVGLoaded)
g.transform('t180,20')
# handlers bound to browser events
def rtoyellow(ev):
r.attr('fill', 'yellow')
def ctogreen(ev):
c.attr('fill', 'green')
# bind mouse events to their handlers by object id
document['rect'].bind('click', rtoyellow)
document['circle'].bind('click', ctogreen)
document['tux'].bind('mouseover', hoverover)
document['tux'].bind('mouseleave', hoverout)
</script>
</body>
</html>
'''
return outstring
@a40323148_1.route('/snap_link')
# http://svg.dabbles.info/
def snap_link():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 snap 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# use window and JSConstructor to pull the contents of the Snap object into the Brython snap object
snap = JSConstructor(window.Snap)
# draw on the svg element whose id is "svgout"
s = snap("#svgout")
offsetY = 50
# optionally outline the drawing area
#borderRect = s.rect(0,0,800,640,10,10).attr({ 'stroke': "silver", 'fill': "silver", 'strokeWidth': "3" })
g = s.group().transform('t250,120')
r0 = s.rect(150,150,100,100,20,20).attr({ 'fill': "orange", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c0 = s.circle(225,225,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c0' })
g0 = s.group( r0,c0 ).attr({ 'id': 'g0' })
#g0.animate({ 'transform' : 't250,120r360,225,225' },4000)
g0.appendTo( g )
g0.animate({ 'transform' : 'r360,225,225' },4000)
# make g0 draggable
g0.drag()
r1 = s.rect(100,100,100,100,20,20).attr({ 'fill': "red", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c1 = s.circle(175,175,10).attr({ 'fill': "silver", 'stroke': "black" , 'strokeWidth': "4"}).attr({ 'id': 'c1' })
g1 = s.group( r1,c1 ).attr({ 'id': 'g1' })
g1.appendTo( g0 ).attr({ 'id': 'g1' })
g1.animate({ 'transform' : 'r360,175,175' },4000)
r2 = s.rect(50,50,100,100,20,20).attr({ 'fill': "blue", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c2 = s.circle(125,125,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c2' })
g2 = s.group(r2,c2).attr({ 'id': 'g2' })
g2.appendTo( g1 );
g2.animate( { 'transform' : 'r360,125,125' },4000);
r3 = s.rect(0,0,100,100,20,20).attr({ 'fill': "yellow", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c3 = s.circle(75,75,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c3' })
g3 = s.group(r3,c3).attr({ 'id': 'g3' })
g3.appendTo( g2 )
g3.animate( { 'transform' : 'r360,75,75' },4000)
r4 = s.rect(-50,-50,100,100,20,20).attr({ 'fill': "green", 'opacity': "0.8", 'stroke': "black", 'strokeWidth': "2" })
c4 = s.circle(25,25,10).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "4" }).attr({ 'id': 'c4' })
g4 = s.group(r4,c4).attr({ 'id': 'g4' });
g4.appendTo( g3 )
g4.animate( { 'transform' : 'r360,25,25' },4000)
</script>
</body>
</html>
'''
return outstring
@a40323148_1.route('/snap_gear')
def snap_gear():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web snap drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="/static/snap.svg-min.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<svg width="800" height="800" viewBox="0 0 800 800" id="svgout"></svg>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window, document
# Retrieve the Snap object from Javascript through window and JSConstructor
snap = JSConstructor(window.Snap)
s = snap("#svgout")
# Draw a straight line
s.line(0, 0, 100, 100).attr({ 'fill': "silver", 'stroke': "black", 'strokeWidth': "1" }).attr({ 'id': 'line1' })
</script>
</body>
</html>
'''
return outstring
@a40323148_1.route('/ag1_2D')
def ag1_2D():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web 2D drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://2015fallhw.github.io/cptocadp/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://2015fallhw.github.io/cptocadp/static/Cango2D-7v01-min.js"></script>
<script type="text/javascript" src="http://2015fallhw.github.io/cptocadp/static/gearUtils-05.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
</head>
<body>
<canvas id='gear1' width='800' height='750'></canvas>
<script type="text/python">
# Alias the imported document as doc, mainly for compatibility with older code
from browser import document as doc
# Python3 and Javascript code are no longer mixed, so Javascript variables must be accessed through the window object
from browser import window
# Existing Javascript objects must instead be wrapped with JSConstructor
from javascript import JSConstructor
import math
# Used mainly to obtain the canvas size
canvas = doc["gear1"]
# This program draws with the Cango Javascript library, so no ctx is needed
#ctx = canvas.getContext("2d")
# Class conversion: wrap the Cango object from Cango.js as the Python cango object
cango = JSConstructor(window.Cango)
# Variable conversion: shapeDefs is a plain variable in Cango and can be reached through window
shapedefs = window.shapeDefs
# Cango animation does not yet work under Brython, so only static shapes are drawn for now
# in CangoAnimation.js
#interpolate1 = window.interpolate
# Cobj and createGearTooth are both objects in the Cango Javascript library
cobj = JSConstructor(window.Cobj)
creategeartooth = JSConstructor(window.createGearTooth)
# cango, converted from Cango for Brython, draws on the canvas with id="gear1"
cgo = cango("gear1")
######################################
# Draw the spur gear outline
######################################
def spur(cx, cy, m, n, pa, theta):
    # n is the number of teeth
#n = 17
    # pa is the pressure angle
#pa = 25
    # m is the module, sized to suit the canvas width
# Module = mm of pitch diameter per tooth
#m = 0.8*canvas.width/n
    # pr is the pitch-circle radius
pr = n*m/2 # gear Pitch radius
# generate gear
data = creategeartooth(m, n, pa)
    # print in a Brython program writes to the browser console
#print(data)
gearTooth = cobj(data, "SHAPE", {
"fillColor":"#ddd0dd",
"border": True,
"strokeColor": "#606060" })
    #gearTooth.rotate(180/n) # rotate gear 1/2 tooth to mesh; note that rotate() takes degrees
    # theta is the rotation angle in degrees
gearTooth.rotate(theta)
    # After rotating the single-tooth profile, copy its data into the gear object
gear = gearTooth.dup()
    # gear holds the outline data of a single tooth
#cgo.render(gearTooth)
    # Rotate copies of the single tooth to build the complete spur gear outline
for i in range(1, n):
        # Copy the gearTooth data into newTooth
newTooth = gearTooth.dup()
        # Each pass of the loop rotates newTooth, then appendPath merges its data into gear
newTooth.rotate(360*i/n)
        # appendPath is a Cango method; passing True as the second argument drops the leading Move-to SVG path command
gear.appendPath(newTooth, True) # trim move command = True
    # Create the axle hole
    # add axle hole; hr is the hole radius
hr = 0.6*pr # diameter of gear shaft
shaft = cobj(shapedefs.circle(hr), "PATH")
shaft.revWinding()
gear.appendPath(shaft) # retain the 'moveTo' command for shaft sub path
gear.translate(cx, cy)
    # render draws the static spur gear outline
cgo.render(gear)
    # Next, draw the gear's reference line
deg = math.pi/180
Line = cobj(['M', cx, cy, 'L', cx+pr*math.cos(theta*deg), cy+pr*math.sin(theta*deg)], "PATH", {
'strokeColor':'red', 'lineWidth': 5})
cgo.render(Line)
# Tooth counts of the three gears
n1 = 17
n2 = 29
n3 = 15
# m is the module, computed from the canvas width
# Module = mm of pitch diameter per tooth
# use 80% of the canvas width for the drawing
# work out the corresponding module size
m = canvas.width*0.8/(n1+n2+n3)
# Compute each gear's pitch radius from its tooth count and the module
pr1 = n1*m/2
pr2 = n2*m/2
pr3 = n3*m/2
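# sanity check: the three pitch circles span 2*(pr1+pr2+pr3) = m*(n1+n2+n3),
# which is exactly the 80% of the canvas width chosen above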
# Reserve 10% of the canvas width on both the left and right sides
# and locate the axle centre of the leftmost gear accordingly
cx = canvas.width*0.1+pr1
cy = canvas.height/2
# pa is the pressure angle
pa = 25
# Draw the leftmost gear with its reference line at 0 degrees and axle centre (cx, cy)
spur(cx, cy, m, n1, pa, 0)
# Rotating its reference line 180 degrees puts the 2nd gear tip-to-tip with the 1st
# Turning the 2nd gear another half tooth, in either direction, completes the mesh
# Each tooth spans root-to-tip over 360/n degrees, so half a tooth is 180/n degrees
spur(cx+pr1+pr2, cy, m, n2, pa, 180-180/n2)
# Meshing gears 2 and 3: first suppose gear 2's reference line sits at theta = 0
# Gear 3 would then mesh after turning 180 degrees plus or minus half a tooth
# But gear 2 has already turned 180-180/n2 degrees to mesh with gear 1
# so the meshed gear 3 must also turn by a further (180-180/n2)*n2/n3 degrees
spur(cx+pr1+pr2+pr2+pr3, cy, m, n3, pa, 180-180/n3+(180-180/n2)*n2/n3)
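# A hypothetical helper capturing the meshing rule used above (sketch only):
# the next gear sits half a tooth past 180 degrees, plus the rotation carried
# over from the previous gear scaled by the tooth ratio n_prev/n_next.
def mesh_angle(theta_prev, n_prev, n_next):
    return 180 - 180/n_next + theta_prev*n_prev/n_next
# e.g. mesh_angle(0, n1, n2) reproduces the 180-180/n2 used for the 2nd gear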
</script>
</body>
</html>
'''
return outstring
@a40323148_1.route('/ag1_2D1')
def ag1_2D1():
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Web 2D drawing</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import alert
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# Draw the axes
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
deg = math.pi/180
# Wrap the chain-link drawing code in a class
class chain():
    # The outline shape is kept as a class variable
chamber = "M -6.8397, -1.4894 A 7, 7, 0, 1, 0, 6.8397, -1.4894 A 40, 40, 0, 0, 1, 6.8397, -18.511 A 7, 7, 0, 1, 0, -6.8397, -18.511 A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
#chamber = "M 0, 0 L 0, -20 z"
cgoChamber = window.svgToCgoSVG(chamber)
def __init__(self, fillcolor="green", border=True, strokecolor= "tan", linewidth=2, scale=1):
self.fillcolor = fillcolor
self.border = border
self.strokecolor = strokecolor
self.linewidth = linewidth
self.scale = scale
    # Draw a link defined by its start and end points
def basic(self, x1, y1, x2, y2):
self.x1 = x1
self.y1 = y1
self.x2 = x2
self.y2 = y2
        # Note: cgoChamber is a class attribute
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
        # hole marks the origin
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
        # Duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
        # The link hangs straight down from the origin, so rotate 90 degrees on top of the atan2 angle
basic1.rotate(math.atan2(y2-y1, x2-x1)/deg+90)
        # Scale up by the scale factor
cgo.render(basic1, x1, y1, self.scale, 0)
    # Draw a link from its start point and rotation angle, using the default color, border and linewidth
def basic_rot(self, x1, y1, rot, v=False):
        # If v is True the link is virtual and is not rendered
self.x1 = x1
self.y1 = y1
self.rot = rot
self.v = v
        # Note: cgoChamber is a class attribute
cmbr = cobj(self.cgoChamber, "SHAPE", {
"fillColor": self.fillcolor,
"border": self.border,
"strokeColor": self.strokecolor,
"lineWidth": self.linewidth })
        # hole marks the origin
hole = cobj(shapedefs.circle(4*self.scale), "PATH")
cmbr.appendPath(hole)
        # Compute x2 and y2 from the rotation angle
x2 = x1 + 20*math.cos(rot*deg)*self.scale
y2 = y1 + 20*math.sin(rot*deg)*self.scale
        # Duplicate cmbr and name the copy basic1
basic1 = cmbr.dup()
        # The link hangs straight down from the origin, so rotate 90 degrees on top of the atan2 angle
basic1.rotate(rot+90)
        # Scale up by the scale factor
if v == False:
cgo.render(basic1, x1, y1, self.scale, 0)
return x2, y2
mychain = chain()
x1, y1 = mychain.basic_rot(-133.06,49.48, 20.78)
x2, y2=mychain.basic_rot(x1, y1,0.7800000000000011, True)
x3, y3=mychain.basic_rot(x2, y2,-19.22, True)
x4, y4=mychain.basic_rot(x3, y3,-39.22, True)
x5, y5=mychain.basic_rot(x4, y4,-59.22, True)
x6, y6=mychain.basic_rot(x5, y5,-79.22, True)
x7, y7=mychain.basic_rot(x6, y6,-99.22, True)
x8, y8=mychain.basic_rot(x7, y7,-119.22, True)
x9, y9=mychain.basic_rot(x8, y8,-139.22, True)
x10, y10=mychain.basic_rot(x9, y9,-159.22, True)
x11, y11=mychain.basic_rot(x10, y10,-179.22, True)
x12, y12=mychain.basic_rot(x11, y11,-199.22)
x13, y13=mychain.basic_rot(x12, y12,-219.22)
x14, y14=mychain.basic_rot(x13, y13,-239.22)
x15, y15=mychain.basic_rot(x14, y14,-259.22)
x16, y16=mychain.basic_rot(x15, y15,-279.22)
x17, y17=mychain.basic_rot(x16, y16,-299.22)
x18, y18=mychain.basic_rot(x17, y17,-319.22)
#mychain = chain()
p1, k1 = mychain.basic_rot(82.11,93.98, 4.78)
p2, k2=mychain.basic_rot(p1, k1,-7.219999999999999)
p3, k3=mychain.basic_rot(p2, k2,-19.22)
p4, k4=mychain.basic_rot(p3, k3,-31.22)
p5, k5=mychain.basic_rot(p4, k4,-43.22)
p6, k6=mychain.basic_rot(p5, k5,-55.22)
p7, k7=mychain.basic_rot(p6, k6,-67.22)
p8, k8=mychain.basic_rot(p7, k7,-79.22)
p9, k9=mychain.basic_rot(p8, k8,-91.22)
p10, k10=mychain.basic_rot(p9, k9,-103.22)
p11, k11=mychain.basic_rot(p10, k10,-115.22)
p12, k12=mychain.basic_rot(p11, k11,-127.22)
p13, k13=mychain.basic_rot(p12, k12,-139.22)
p14, k14=mychain.basic_rot(p13, k13,-151.22)
p15, k15=mychain.basic_rot(p14, k14,-163.22)
p16, k16=mychain.basic_rot(p15, k15,-175.22)
p17, k17=mychain.basic_rot(p16, k16,-187.22)
p18, k18=mychain.basic_rot(p17, k17,-199.22, True)
p19, k19=mychain.basic_rot(p18, k18,-211.22, True)
p20, k20=mychain.basic_rot(p19, k19,-223.22, True)
p21, k21=mychain.basic_rot(p20, k20,-235.22, True)
p22, k22=mychain.basic_rot(p21, k21,-247.22, True)
p23, k23=mychain.basic_rot(p22, k22,-259.22, True)
p24, k24=mychain.basic_rot(p23, k23,-271.22, True)
p25, k25=mychain.basic_rot(p24, k24,-283.22, True)
p26, k26=mychain.basic_rot(p25, k25,-295.22, True)
p27, k27=mychain.basic_rot(p26, k26,-307.22, True)
p28, k28=mychain.basic_rot(p27, k27,-319.22, True)
p29, k29=mychain.basic_rot(p28, k28,-331.22, True)
p30, k30=mychain.basic_rot(p29, k29,-343.22, True)
m1, n1 = mychain.basic_rot(x1, y1, 10.78)
m2, n2=mychain.basic_rot(m1, n1, 10.78)
m3, n3=mychain.basic_rot(m2, n2, 10.78)
m4, n4=mychain.basic_rot(m3, n3, 10.78)
m5, n5=mychain.basic_rot(m4, n4, 10.78)
m6, n6=mychain.basic_rot(m5, n5, 10.78)
m7, n7=mychain.basic_rot(m6, n6, 10.78)
m8, n8=mychain.basic_rot(m7, n7, 10.78)
m9, n9=mychain.basic_rot(m8, n8, 10.78)
m10, n10=mychain.basic_rot(m9, n9, 10.78)
r1, s1 = mychain.basic_rot(x11, y11, -10.78)
r2, s2=mychain.basic_rot(r1, s1, -10.78)
r3, s3=mychain.basic_rot(r2, s2, -10.78)
r4, s4=mychain.basic_rot(r3, s3, -10.78)
r5, s5=mychain.basic_rot(r4, s4, -10.78)
r6, s6=mychain.basic_rot(r5, s5, -10.78)
r7, s7=mychain.basic_rot(r6, s6, -10.78)
r8, s8=mychain.basic_rot(r7, s7, -10.78)
r9, s9=mychain.basic_rot(r8, s8, -10.78)
r10, s10=mychain.basic_rot(r9, s9, -10.78)
</script>
</body>
</html>
'''
return outstring
| 2015fallhw/cdw11 | users/a/g1/a40323148_1.py | Python | agpl-3.0 | 37,865 |
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017
"""
Contains test related code that is executed at runtime
in the context of the application under test.
"""
import streamsx.ec as ec
import streamsx.topology.context as stc
import os
import unittest
import logging
import collections
import threading
from streamsx.rest import StreamsConnection
from streamsx.rest import StreamingAnalyticsConnection
from streamsx.topology.context import ConfigParams
import time
class Condition(object):
"""A condition for testing.
Args:
name(str): Condition name, must be unique within the tester.
"""
_METRIC_PREFIX = "streamsx.condition:"
@staticmethod
def _mn(mt, name):
return Condition._METRIC_PREFIX + mt + ":" + name
def __init__(self, name=None):
self.name = name
self._starts_valid = False
self._valid = False
self._fail = False
@property
def valid(self):
"""Is the condition valid.
A subclass must set `valid` when the condition becomes valid.
"""
return self._valid
@valid.setter
def valid(self, v):
if self._fail:
return
if self._valid != v:
if v:
self._metric_valid.value = 1
else:
self._metric_valid.value = 0
self._valid = v
self._metric_seq += 1
def fail(self):
"""Fail the condition.
Marks the condition as failed. Once a condition has failed it
can never become valid, the test that uses the condition will fail.
"""
self._metric_fail.value = 1
self.valid = False
self._fail = True
if (ec.is_standalone()):
raise AssertionError("Condition failed:" + str(self))
def __getstate__(self):
# Remove metrics from saved state.
state = self.__dict__.copy()
for key in state:
if key.startswith('_metric'):
del state[key]
return state
def __setstate__(self, state):
self.__dict__.update(state)
def __enter__(self):
self._metric_valid = self._create_metric("valid", kind='Gauge')
self._metric_seq = self._create_metric("seq")
self._metric_fail = self._create_metric("fail", kind='Gauge')
if self._starts_valid:
self.valid = True
def __exit__(self, exc_type, exc_value, traceback):
if (ec.is_standalone()):
if not self._fail and not self.valid:
raise AssertionError("Condition failed:" + str(self))
def _create_metric(self, mt, kind=None):
return ec.CustomMetric(self, name=Condition._mn(mt, self.name), kind=kind)
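# Example (hypothetical, not part of the shipped API): a minimal Condition
# subclass that becomes valid as soon as any tuple arrives.
class _AnyTuple(Condition):
    def __call__(self, tuple):
        self.valid = True
    def __str__(self):
        return "Any tuple seen:" + str(self.valid)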
class _TupleExactCount(Condition):
def __init__(self, target, name=None):
super(_TupleExactCount, self).__init__(name)
self.target = target
self.count = 0
self._starts_valid = target == 0
def __call__(self, tuple):
self.count += 1
self.valid = self.target == self.count
if self.count > self.target:
self.fail()
def __str__(self):
return "Exact tuple count: expected:" + str(self.target) + " received:" + str(self.count)
class _TupleAtLeastCount(Condition):
def __init__(self, target, name=None):
super(_TupleAtLeastCount, self).__init__(name)
self.target = target
self.count = 0
self._starts_valid = target == 0
def __call__(self, tuple):
self.count += 1
self.valid = self.count >= self.target
def __str__(self):
return "At least tuple count: expected:" + str(self.target) + " received:" + str(self.count)
class _StreamContents(Condition):
def __init__(self, expected, name=None):
super(_StreamContents, self).__init__(name)
self.expected = expected
self.received = []
def __call__(self, tuple):
self.received.append(tuple)
if len(self.received) > len(self.expected):
self.fail()
return
if self._check_for_failure():
return
self.valid = len(self.received) == len(self.expected)
def _check_for_failure(self):
"""Check for failure.
"""
if self.expected[len(self.received) - 1] != self.received[-1]:
self.fail()
return True
return False
def __str__(self):
return "Stream contents: expected:" + str(self.expected) + " received:" + str(self.received)
class _UnorderedStreamContents(_StreamContents):
def _check_for_failure(self):
"""Unordered check for failure.
Can only check when the expected number of tuples have been received.
"""
if len(self.expected) == len(self.received):
if collections.Counter(self.expected) != collections.Counter(self.received):
self.fail()
return True
return False
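# collections.Counter compares the two sides as multisets, so for example
# Counter(['a', 'b', 'b']) == Counter(['b', 'a', 'b']) holds no matter in
# which order the tuples arrived.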
class _TupleCheck(Condition):
def __init__(self, checker, name=None):
super(_TupleCheck, self).__init__(name)
self.checker = checker
def __call__(self, tuple):
if not self.checker(tuple):
self.fail()
else:
# Will not override if already failed
self.valid = True
def __str__(self):
return "Tuple checker:" + str(self.checker)
| IBMStreams/streamsx.topology | test/python/spl/tk17/opt/.__splpy/packages/streamsx/topology/tester_runtime.py | Python | apache-2.0 | 5,369 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
"""
Model test set
"""
import unittest
from tr55.model import runoff_nrcs, runoff_pitt, \
simulate_cell_day, simulate_water_quality, \
create_unmodified_census, create_modified_census, \
simulate_day, compute_bmp_effect
from tr55.tablelookup import lookup_ki
# These data are taken directly from Table 2-1 of the revised (1986)
# TR-55 report. The data in the PS array are various precipitation
# levels, and each respective CNx array is the calculated runoff for
# that particular curve number with the given level of precipitation
# corresponding to that in PS.
PS = [1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0] # noqa
CN55 = [0.000, 0.000, 0.000, 0.000, 0.000, 0.020, 0.080, 0.190, 0.350, 0.530, 0.740, 0.980, 1.520, 2.120, 2.780, 3.490, 4.230, 5.000, 5.790, 6.610, 7.440, 8.290] # noqa
CN70 = [0.000, 0.030, 0.060, 0.110, 0.170, 0.240, 0.460, 0.710, 1.010, 1.330, 1.670, 2.040, 2.810, 3.620, 4.460, 5.330, 6.220, 7.130, 8.050, 8.980, 9.910, 10.85] # noqa
CN80 = [0.080, 0.150, 0.240, 0.340, 0.440, 0.560, 0.890, 1.250, 1.640, 2.040, 2.460, 2.890, 3.780, 4.690, 5.630, 6.570, 7.520, 8.480, 9.450, 10.42, 11.39, 12.37] # noqa
CN90 = [0.320, 0.460, 0.610, 0.760, 0.930, 1.090, 1.530, 1.980, 2.450, 2.920, 3.400, 3.880, 4.850, 5.820, 6.810, 7.790, 8.780, 9.770, 10.76, 11.76, 12.75, 13.74] # noqa
CN95 = [0.560, 0.740, 0.920, 1.110, 1.290, 1.480, 1.960, 2.450, 2.940, 3.430, 3.920, 4.420, 5.410, 6.410, 7.400, 8.400, 9.400, 10.39, 11.39, 12.39, 13.39, 14.39] # noqa
# These are runoffs calculated by Sara Damiano.
PS_PITT = [0.01, 0.08, 0.12, 0.2, 0.39, 0.59, 0.79, 0.98, 1.2, 1.6, 2, 2.4, 2.8, 3.2, 3.5, 3.9, 4.9]
PITT_RES_A = [0.0008, 0.0089, 0.0172, 0.0320, 0.0718, 0.1211, 0.1689, 0.2143, 0.2699, 0.3685, 0.4717, 0.5718, 0.6753, 0.7806, 0.8626, 0.9692, 1.2363]
PITT_COMM_C = [0.0020, 0.0225, 0.0484, 0.0979, 0.2265, 0.3751, 0.5275, 0.6728, 0.8508, 1.1836, 1.5448, 1.8767, 2.2189, 2.5615,2.8206,3.1969,4.1109]
PITT_OPEN_B = [0.0004, 0.0038, 0.0072, 0.0129, 0.0459, 0.0862, 0.1293, 0.1663, 0.2171, 0.2998, 0.6254, 0.7554, 0.8862, 1.0182, 1.1196, 1.2502, 1.5824]
# A census containing several land covers and several land cover modifications.
# This includes BMP's that act as land cover changes.
CENSUS_1 = {
'cell_count': 147,
'distribution': {
'c:developed_high': {
'cell_count': 42
},
'a:deciduous_forest': {
'cell_count': 72
},
'd:developed_med': {
'cell_count': 33
}
},
'modifications': [
{
'change': '::no_till',
'cell_count': 30,
'distribution': {
'c:developed_high': {
'cell_count': 20
},
'd:developed_med': {
'cell_count': 10
}
}
},
{
'change': 'd:barren_land:',
'cell_count': 5,
'distribution': {
'a:deciduous_forest': {
'cell_count': 5
}
},
}
]
}
DAY_OUTPUT_1 = {
"unmodified": {
"cell_count": 147,
"et": 0.1133008163265306,
"inf": 1.1502372848963631,
"runoff": 0.7364618987771062,
"tn": 0.21658484153801666,
"tp": 0.021950439114742523,
"bod": 0.5784025092432281,
"tss": 9.22109038667434,
"distribution": {
"c:developed_high": {
"cell_count": 42,
"et": 0.01242,
"inf": 0.4427799999999999,
"runoff": 1.5447999999999997,
"tn": 0.12863736150047997,
"tp": 0.012718383199199997,
"bod": 0.35974855334879996,
"tss": 5.948569513454399
},
"a:deciduous_forest": {
"bod": 0.0,
"cell_count": 72,
"et": 0.207,
"inf": 1.7930000000000001, # should be 1.793, but float point rounding issues
"runoff": 0.0,
"tn": 0.0,
"tp": 0.0,
"tss": 0.0
},
"d:developed_med": {
"cell_count": 33,
"et": 0.037259999999999995, # should be 0.03726, but float point rounding issues
"inf": 0.6482460872656175,
"runoff": 1.3144939127343824,
"tn": 0.08794748003753668,
"tp": 0.009232055915542525,
"bod": 0.2186539558944282,
"tss": 3.2725208732199413
}
},
},
"modified": {
"cell_count": 147,
"et": 0.1421681632653061,
"inf": 1.2274582250603976,
"runoff": 0.6303736116742962,
"tn": 0.1720058738819889,
"tp": 0.017485764326923163,
"bod": 0.45556864535008623,
"tss": 7.2307065282292236,
"distribution": {
"a:deciduous_forest": {
'cell_count': 72,
'et': 0.1969375,
'inf': 1.7060957171334694,
'runoff': 0.09696678286653043,
'tn': 0.0003910189397764155,
'tp': 3.9101893977641545e-05,
'bod': 0.0,
'tss': 0.0039101893977641555,
'distribution': {
'a:deciduous_forest': {
'cell_count': 67,
'et': 0.207,
'inf': 1.793,
'runoff': 0.0,
'tn': 0.0,
'tp': 0.0,
'bod': 0.0,
'tss': 0.0
},
'd:barren_land:': {
'cell_count': 5,
'et': 0.0621,
'inf': 0.5415783267219616,
'runoff': 1.3963216732780384,
'tn': 0.0003910189397764155,
'tp': 3.9101893977641545e-05,
'bod': 0.0,
'tss': 0.0039101893977641555
}
}
},
"c:developed_high": {
'cell_count': 42,
'inf': 0.7866861254657073,
'et': 0.09522,
'runoff': 1.1180938745342925,
'tn': 0.09310502714263344,
'tp': 0.009205299293763191,
'bod': 0.26037846573787315,
'tss': 4.30544998396867,
'distribution': {
'c:developed_high:no_till': {
'cell_count': 20,
'et': 0.1863,
'inf': 1.1649828634779853,
'runoff': 0.6487171365220146,
'tn': 0.025723552070953434,
'tp': 0.0025432890465631923,
'bod': 0.07193874731707317,
'tss': 1.1895326197782703
},
'c:developed_high': {
'cell_count': 22,
'et': 0.012419999999999999,
'inf': 0.44277999999999995,
'runoff': 1.5448,
'tn': 0.06738147507168,
'tp': 0.0066620102471999995,
'bod': 0.1884397184208,
'tss': 3.1159173641904
}
}
},
"d:developed_med": {
'cell_count': 33,
'et': 0.08242363636363635,
'inf': 0.7441409145669375,
'runoff': 1.173435449069426,
'tn': 0.07850982779957905,
'tp': 0.00824136313918233,
'bod': 0.1951901796122131,
'tss': 2.9213463548627887,
'distribution': {
'd:developed_med:no_till': {
'cell_count': 10,
'et': 0.1863,
'inf': 0.9646990173599737,
'runoff': 0.8490009826400262,
'tn': 0.017213099288568623,
'tp': 0.0018068999253193583,
'bod': 0.042794998231247966,
'tss': 0.6404984735276779
},
'd:developed_med': {
'cell_count': 23,
'et': 0.037259999999999995,
'inf': 0.6482460872656175,
'runoff': 1.3144939127343824,
'tn': 0.06129672851101042,
'tp': 0.006434463213862972,
'bod': 0.15239518138096514,
'tss': 2.2808478813351107
}
}
}
}
}
}
CENSUS_2 = {
'cell_count': 40,
'BMPs': {
'rain_garden': 8,
'green_roof': 16
},
'distribution': {
'd:developed_med': {'cell_count': 10},
'c:developed_high': {'cell_count': 10},
'a:deciduous_forest': {'cell_count': 10},
'b:pasture': {'cell_count': 10}
},
'modifications': [
{
'change': '::no_till',
'cell_count': 1,
'distribution': {
'b:pasture': {'cell_count': 1}
}
},
{
'change': '::cluster_housing',
'cell_count': 1,
'distribution': {
'd:developed_med': {'cell_count': 1}
}
}
]
}
DAY_OUTPUT_2 = {
'unmodified': {
'BMPs': {
'rain_garden': 8,
'green_roof': 16
},
'cell_count': 40,
'et': 0.11333250000000002,
'inf': 1.1535619337299798,
'runoff': 0.7331055662700201,
'tn': 0.059617339229556615,
'tp': 0.006051042657319955,
'bod': 0.15723760149623622,
'tss': 2.4673867111493006,
'distribution': {
'a:deciduous_forest': {
'cell_count': 10,
'et': 0.207,
'inf': 1.793,
'runoff': 0.0,
'tn': 0.0,
'tp': 0.0,
'bod': 0.0,
'tss': 0.0
},
'b:pasture': {
'cell_count': 10,
'et': 0.19665,
'inf': 1.7302216476543024,
'runoff': 0.0731283523456977,
'tn': 0.0023386444886303553,
'tp': 0.000225263479640402,
'bod': 0.005324409518773138,
'tss': 0.05938764463246961
},
'c:developed_high': {
'cell_count': 10,
'et': 0.012419999999999999,
'inf': 0.44277999999999995,
'runoff': 1.5448,
'tn': 0.030627943214399996,
'tp': 0.003028186476,
'bod': 0.08565441746399999,
'tss': 1.4163260746319999
},
'd:developed_med': {
'cell_count': 10,
'et': 0.037259999999999995,
'inf': 0.6482460872656175,
'runoff': 1.3144939127343824,
'tn': 0.026650751526526267,
'tp': 0.0027975927016795532,
'bod': 0.0662587745134631,
'tss': 0.9916729918848309
}
}
},
'modified': {
'BMPs': {
'rain_garden': 8,
'green_roof': 16
},
'cell_count': 40,
'inf': 1.3866270217345416,
'et': 0.11431574999999998,
'runoff': 0.4990572282654582,
'tn': 0.040782174111145944,
'tp': 0.004134835152274705,
'bod': 0.10743922519457133,
'tss': 1.6812394993524349,
'distribution': {
'a:deciduous_forest': {
'cell_count': 10,
'et': 0.207,
'inf': 1.793,
'runoff': 0.0,
'tn': 0.0,
'tp': 0.0,
'bod': 0.0,
'tss': 0.0,
'distribution': {
'a:deciduous_forest': {
'cell_count': 10,
'et': 0.207,
'inf': 1.793,
'runoff': 0.0,
'tn': 0.0,
'tp': 0.0,
'bod': 0.0,
'tss': 0.0
}
}
},
'b:pasture': {
'cell_count': 10,
'et': 0.19561499999999998,
'inf': 1.7375451880661668,
'runoff': 0.06683981193383332,
'tn': 0.0021375369851245538,
'tp': 0.00020589235408380115,
'bod': 0.0048665465510716625,
'tss': 0.05428071153118393,
'distribution': {
'b:pasture:no_till': {
'cell_count': 1,
'et': 0.1863,
'inf': 1.594902340195513,
'runoff': 0.2187976598044872,
'tn': 0.0006997148504154522,
'tp': 6.739810292968453e-05,
'bod': 0.0015930460692470888,
'tss': 0.017768590772371376
},
'b:pasture': {
'cell_count': 9,
'et': 0.19665,
'inf': 1.7533943933851281,
'runoff': 0.04995560661487177,
'tn': 0.0014378221347091014,
'tp': 0.00013849425115411662,
'bod': 0.0032735004818245737,
'tss': 0.03651212075881256
}
}
},
'c:developed_high': {
'cell_count': 10,
'inf': 0.9322927054928913,
'et': 0.012419999999999999,
'runoff': 1.0552872945071088,
'tn': 0.02092263032822472,
'tp': 0.002068621642621088,
'bod': 0.058512440748425064,
'tss': 0.9675238939916346,
'distribution': {
'c:developed_high': {
'cell_count': 10,
'et': 0.012419999999999999,
'inf': 0.9322927054928913,
'runoff': 1.0552872945071088,
'tn': 0.02092263032822472,
'tp': 0.002068621642621088,
'bod': 0.058512440748425064,
'tss': 0.9675238939916346
}
}
},
'd:developed_med': {
'cell_count': 10,
'inf': 1.0836701933791093,
'et': 0.042228,
'runoff': 0.8741018066208908,
'tn': 0.017722006797796674,
'tp': 0.0018603211555698164,
'bod': 0.0440602378950746,
'tss': 0.6594348938296164,
'distribution': {
'd:developed_med': {
'cell_count': 9,
'et': 0.037259999999999995,
'inf': 1.0647799243814537,
'runoff': 0.8979600756185461,
'tn': 0.016385149875393707,
'tp': 0.0017199881084667425,
'bod': 0.040736560463686004,
'tss': 0.6096905216065004
},
'd:developed_med:cluster_housing': {
'cell_count': 1,
'et': 0.08693999999999999,
'inf': 1.253682614358008,
'runoff': 0.6593773856419921,
'tn': 0.001336856922402968,
'tp': 0.000140333047103074,
'bod': 0.0033236774313885943,
'tss': 0.04974437222311596
}
}
}
}
}
}
def simulate(precip, tile_string):
land_use = tile_string.split(':')[1]
ki = lookup_ki(land_use)
return simulate_cell_day(precip, 0.207 * ki, tile_string, 1)
def average(l):
    # reduce is no longer a builtin in Python 3; import it from functools
    from functools import reduce
    return reduce(lambda x, y: x + y, l) / len(l)
class TestModel(unittest.TestCase):
"""
Model test set.
"""
def test_nrcs(self):
"""
Test the implementation of the runoff equation.
"""
# This pair has CN=55
runoffs = [round(runoff_nrcs(precip, 0.0, 'b', 'deciduous_forest'), 2)
for precip in PS]
# Low curve number and low P cause too-high runoff
self.assertEqual(runoffs[4:], CN55[4:])
# This pair has CN=70
runoffs = [round(runoff_nrcs(precip, 0.0, 'c', 'deciduous_forest'), 2)
for precip in PS]
self.assertEqual(runoffs[1:], CN70[1:])
# This pair has CN=80
runoffs = [round(runoff_nrcs(precip, 0.0, 'd', 'pasture'), 2)
for precip in PS]
self.assertEqual(runoffs, CN80)
# This pair has CN=95
runoffs = [round(runoff_nrcs(precip, 0.0, 'c', 'developed_high'), 2)
for precip in PS]
self.assertEqual(runoffs, CN95)
def test_pitt(self):
"""
Test the implementation of the SSH/Pitt runoff model.
"""
runoff_modeled = [round(runoff_pitt(precip, 0.0, 'c', 'developed_high'), 2)
for precip in PS_PITT]
        runoff_test_suite = [round(runoff, 2)
                             for runoff in PITT_COMM_C]
        self.assertEqual(runoff_test_suite, runoff_modeled)
runoff_modeled = [round(runoff_pitt(precip, 0.0, 'b', 'developed_open'), 2)
for precip in PS_PITT]
        runoff_test_suite = [round(runoff, 2)
                             for runoff in PITT_OPEN_B]
        self.assertEqual(runoff_modeled, runoff_test_suite)
runoff_modeled = [round(runoff_pitt(precip, 0.0, 'a', 'developed_low'), 2)
for precip in PS_PITT]
        runoff_test_suite = [round(runoff, 2)
                             for runoff in PITT_RES_A]
        self.assertEqual(runoff_modeled, runoff_test_suite)
def test_simulate_cell_day(self):
"""
Test the simulate_cell_day function.
"""
result1 = simulate_cell_day(42, 93, 'a:barren_land:', 1)
result2 = simulate_cell_day(42, 93, 'a:barren_land:', 2)
self.assertEqual(result1['runoff-vol'] * 2, result2['runoff-vol'])
def test_create_unmodified_census(self):
"""
Test create_unmodified_census.
"""
census = {
"cell_count": 2,
"distribution": {
"a:barren_land": {"cell_count": 1},
"a:open_water": {"cell_count": 1}
},
"modifications": [
{
"change": "::cluster_housing",
"cell_count": 1,
"distribution": {
"a:barren_land": {"cell_count": 1}
}
}
]
}
result = create_unmodified_census(census)
census.pop("modifications", None)
self.assertEqual(census, result)
def test_create_modified_census_1(self):
"""
create_modified_census from a census w/o modifications.
"""
census = {
"cell_count": 5,
"distribution": {
"a:barren_land": {"cell_count": 3},
"a:open_water": {"cell_count": 2}
}
}
expected = {
"cell_count": 5,
"distribution": {
"a:barren_land": {
"cell_count": 3,
"distribution": {"a:barren_land": {"cell_count": 3}}
},
"a:open_water": {
"cell_count": 2,
"distribution": {"a:open_water": {"cell_count": 2}}
}
}
}
actual = create_modified_census(census)
self.assertEqual(actual, expected)
def test_create_modified_census_2(self):
"""
create_modified_census from a census w/ trivial modifications.
"""
census = {
"cell_count": 3,
"distribution": {
"a:barren_land": {"cell_count": 2},
"a:open_water": {"cell_count": 1}
},
"modifications": []
}
expected = {
"cell_count": 3,
"distribution": {
"a:barren_land": {
"cell_count": 2,
"distribution": {"a:barren_land": {"cell_count": 2}}
},
"a:open_water": {
"cell_count": 1,
"distribution": {"a:open_water": {"cell_count": 1}}
}
}
}
actual = create_modified_census(census)
self.assertEqual(actual, expected)
def test_create_modified_census_3(self):
"""
create_modified_census with non-trivial modifications.
"""
census = {
"cell_count": 144,
"distribution": {
"a:barren_land": {"cell_count": 55},
"a:open_water": {"cell_count": 89}
},
"modifications": [
{
"change": "::cluster_housing",
"cell_count": 34,
"distribution": {
"a:barren_land": {"cell_count": 34}
}
}
]
}
expected = {
"cell_count": 144,
"distribution": {
"a:barren_land": {
"cell_count": 55,
"distribution": {
"a:barren_land:cluster_housing": {"cell_count": 34},
"a:barren_land": {"cell_count": 21}
}
},
"a:open_water": {
"cell_count": 89,
"distribution": {
"a:open_water": {"cell_count": 89}
}
}
}
}
actual = create_modified_census(census)
self.assertEqual(actual, expected)
def test_create_modified_census_4(self):
"""
create_modified_census with different types of changes.
"""
census = {
"distribution": {
"a:developed_low": {
"cell_count": 3
}
},
"cell_count": 3,
"modifications": [
{
"distribution": {
"a:developed_low": {
"cell_count": 1
}
},
"cell_count": 1,
"change": ":deciduous_forest:cluster_housing"
},
{
"distribution": {
"a:developed_low": {
"cell_count": 1
}
},
"cell_count": 1,
"change": ":deciduous_forest:"
},
{
"distribution": {
"a:developed_low": {
"cell_count": 1
}
},
"cell_count": 1,
"change": "::cluster_housing"
},
]
}
expected = set([
'a:deciduous_forest:',
'a:developed_low',
'a:deciduous_forest:cluster_housing',
'a:developed_low:cluster_housing'])
modified = create_modified_census(census)
distrib = modified['distribution']['a:developed_low']['distribution']
actual = set(distrib.keys())
self.assertEqual(actual, expected)
def test_simulate_water_quality_1(self):
"""
Test the water quality simulation with unmodified census.
"""
census = {
"cell_count": 5,
"distribution": {
"a:barren_land": {"cell_count": 3},
"a:open_water": {"cell_count": 2}
}
}
def fn(cell, cell_count):
return simulate_cell_day(5, 0.207, cell, cell_count)
simulate_water_quality(census, 93, fn)
left = census['distribution']['a:barren_land']
right = census['distribution']['a:open_water']
for key in set(census.keys()) - set(['distribution']):
self.assertEqual(left[key] + right[key], census[key])
def test_simulate_water_quality_2(self):
"""
Test the water quality simulation in the presence of modifications.
"""
census = {
"cell_count": 3,
"distribution": {
"a:barren_land": {"cell_count": 2},
"a:open_water": {"cell_count": 1}
},
"modifications": [
{
"change": "d:developed_med:",
"cell_count": 1,
"distribution": {
"a:barren_land": {"cell_count": 1}
}
}
]
}
census1 = create_modified_census(census)
census2 = {
"cell_count": 3,
"distribution": {
"a:barren_land": {"cell_count": 1},
"d:developed_med": {"cell_count": 1},
"a:open_water": {"cell_count": 1}
}
}
def fn(cell, cell_count):
return simulate_cell_day(5, 0.207, cell, cell_count)
simulate_water_quality(census1, 93, fn)
simulate_water_quality(census2, 93, fn)
for key in set(census1.keys()) - set(['distribution']):
self.assertEqual(census1[key], census2[key])
def test_simulate_water_quality_precolumbian(self):
"""
Test the water quality simulation in Pre-Columbian times.
"""
census1 = {
"cell_count": 8,
"distribution": {
"a:developed_med": {"cell_count": 1},
"b:no_till": {"cell_count": 1},
"c:pasture": {"cell_count": 1},
"d:cultivated_crops": {"cell_count": 1},
"a:open_water": {"cell_count": 1},
"b:shrub": {"cell_count": 1},
"c:barren_land": {"cell_count": 1},
"d:developed_open": {"cell_count": 1}
}
}
census2 = {
"cell_count": 8,
"distribution": {
"a:mixed_forest": {"cell_count": 1},
"b:mixed_forest": {"cell_count": 1},
"c:mixed_forest": {"cell_count": 1},
"d:mixed_forest": {"cell_count": 2},
"a:open_water": {"cell_count": 1},
"b:shrub": {"cell_count": 1},
"c:barren_land": {"cell_count": 1}
}
}
census3 = census2.copy()
def fn(cell, cell_count):
return simulate_cell_day(7, 0.107, cell, cell_count)
simulate_water_quality(census1, 93, fn, precolumbian=True)
simulate_water_quality(census2, 93, fn, precolumbian=True)
simulate_water_quality(census3, 93, fn, precolumbian=False)
for key in set(census1.keys()) - set(['distribution']):
self.assertAlmostEqual(census1[key], census2[key])
for key in set(census1.keys()) - set(['distribution']):
self.assertAlmostEqual(census2[key], census3[key])
def test_day_1(self):
"""
Test the simulate_day function with only land cover modifications.
"""
self.maxDiff = None
precip = 2
actual = simulate_day(CENSUS_1, precip)
expected = DAY_OUTPUT_1
self.assertEqual(actual, expected)
def test_day_2(self):
"""
Test the simulate_day function with lots of BMPs.
"""
self.maxDiff = None
precip = 2
actual = simulate_day(CENSUS_2, precip)
expected = DAY_OUTPUT_2
self.assertEqual(actual, expected)
def test_day_with_invalid_census(self):
"""
Test the simulate_day function with a census
that has a modification census with a cover type
that doesn't exist within the AoI census. This is
invalid input. Each land cover type in a modification
census must be represented in AoI census.
"""
census = {
'distribution': {
'b:developed_med': {'cell_count': 400},
},
'cell_count': 400,
'modifications': [
{
'distribution': {
'b:developed_low': {'cell_count': 40}
},
'cell_count': 40,
'change': ':deciduous_forest:'
},
]
}
precip = 3
self.assertRaises(ValueError,
simulate_day, *(census, precip))
def test_bmp_runoff(self):
"""
Make sure that BMPs do not produce negative runoff.
"""
census = {
"cell_count": 1,
"distribution": {
"d:developed_med": {"cell_count": 1}
},
"modifications": [
{
"change": "::green_roof",
"cell_count": 1,
"distribution": {
"d:developed_med": {"cell_count": 1}
}
}
]
}
result = simulate_day(census, 0.984)
self.assertTrue(result['modified']['runoff'] >= 0)
def test_water_balance(self):
"""
Make sure that R, ET, and I sum to precip with no modifications.
"""
census = {
"cell_count": 1,
"distribution": {
"d:developed_med": {"cell_count": 1}
},
}
precip = 0.984
result = simulate_day(census, precip)
runoff = result['modified']['runoff']
et = result['modified']['et']
inf = result['modified']['inf']
total = runoff + et + inf
self.assertEqual(total, precip)
def test_water_balance_1(self):
"""
Make sure that R, ET, and I sum to precip with only land cover modifications.
"""
precip = 2.362
result = simulate_day(CENSUS_1, precip)
runoff = result['modified']['runoff']
et = result['modified']['et']
inf = result['modified']['inf']
total = runoff + et + inf
self.assertEqual(total, precip)
def test_water_balance_2(self):
"""
Make sure that R, ET, and I sum to precip with with lots of BMPs.
"""
precip = 4.429
result = simulate_day(CENSUS_2, precip)
runoff = result['modified']['runoff']
et = result['modified']['et']
inf = result['modified']['inf']
total = runoff + et + inf
self.assertEqual(total, precip)
def test_compute_bmp_effect(self):
"""
Test that the BMP reduction is working as expected.
"""
# This is an abbreviated census without postpass for 3.2"
# of rain on developed_med on soil type c
mod_census = {
'BMPs': {
'porous_paving': 10,
'infiltration_basin': 15
},
'cell_count': 100,
'inf-vol': 90.44788778622261,
'et-vol': 3.7259999999999995,
'runoff-vol': 225.8261122137774,
}
precip = 3.2 # should fill basin but not porous paving
m2_per_pixel = 10
pct = compute_bmp_effect(mod_census, m2_per_pixel, precip)
# No exception should be raised, no bmp effect given
self.assertAlmostEqual(0.8121403161, pct)
def test_compute_bmp_no_runoff(self):
"""
Test that no runoff will not produce errors when computing BMP effects
"""
census = {
'runoff-vol': 0,
'BMPs': {
'green_roof': 1942
}
}
# No exception should be raised, no bmp effect given
self.assertEqual(0, compute_bmp_effect(census, 42, 0.393))
if __name__ == "__main__":
unittest.main()
| WikiWatershed/tr-55 | test/test_model.py | Python | apache-2.0 | 33,009 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2019 Bitergia
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Santiago Dueñas <[email protected]>
#
import argparse
import logging
from .. import api, utils
from ..command import Command, CMD_SUCCESS, HELP_LIST
from ..exceptions import InvalidDateError, NotFoundError, InvalidValueError
logger = logging.getLogger(__name__)
class Log(Command):
"""List enrollment information available in the registry.
The command list a set of enrollments. Some searching parameters
to filter the results are available. Parameters <uuid> and <organization>
filter by unique identity and organization name. Enrollments between a
period can also be listed using <from> and <to> parameters, where
<from> must be less or equal than <to>. Default values for these dates
are '1900-01-01' and '2100-01-01'.
Dates may follow several patterns. The most common and recommended
is 'YYYY-MM-DD'. Optionally, time information can be included using
patters like 'YYYY-MM-DD hh:mm:ss'.
"""
def __init__(self, **kwargs):
super(Log, self).__init__(**kwargs)
self.parser = argparse.ArgumentParser(description=self.description,
usage=self.usage)
# Enrollments search options
self.parser.add_argument('--uuid', default=None,
help="unique identity to withdraw")
self.parser.add_argument('--organization', default=None,
help="organization where the uuid is enrolled")
self.parser.add_argument('--from', dest='from_date', default=None,
help="date (YYYY-MM-DD:hh:mm:ss) when the enrollment starts")
self.parser.add_argument('--to', dest='to_date', default=None,
help="date (YYYY-MM-DD:hh:mm:ss) when the enrollment ends")
# Exit early if help is requested
if 'cmd_args' in kwargs and [i for i in kwargs['cmd_args'] if i in HELP_LIST]:
return
self._set_database(**kwargs)
@property
def description(self):
return """List enrollments."""
@property
def usage(self):
return "%(prog)s log [--uuid <uuid>] [--organization <organization>] [--from <date>] [--to <date>]"
def run(self, *args):
"""List enrollments using search parameters."""
params = self.parser.parse_args(args)
uuid = params.uuid
organization = params.organization
try:
from_date = utils.str_to_datetime(params.from_date)
to_date = utils.str_to_datetime(params.to_date)
code = self.log(uuid, organization, from_date, to_date)
except InvalidDateError as e:
self.error(str(e))
return e.code
return code
def log(self, uuid=None, organization=None, from_date=None, to_date=None):
""""List enrollment information available in the registry.
Method that returns a list of enrollments. If <uuid> parameter is set,
it will return the enrollments related to that unique identity;
if <organization> parameter is given, it will return the enrollments
related to that organization; if both parameters are set, the function
will return the list of enrollments of <uuid> on the <organization>.
Enrollments between a period can also be listed using <from_date> and
<to_date> parameters. When these are set, the method will return
all those enrollments where Enrollment.start >= from_date AND
Enrollment.end <= to_date. Defaults values for these dates are
1900-01-01 and 2100-01-01.
:param uuid: unique identifier
:param organization: name of the organization
:param from_date: date when the enrollment starts
:param to_date: date when the enrollment ends
"""
try:
enrollments = api.enrollments(self.db, uuid, organization,
from_date, to_date)
self.display('log.tmpl', enrollments=enrollments)
except (NotFoundError, InvalidValueError) as e:
self.error(str(e))
return e.code
return CMD_SUCCESS
| grimoirelab/sortinghat | sortinghat/cmd/log.py | Python | gpl-3.0 | 4,929 |
"""
Copyright 2008-2011,2015 Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
from sys import platform
import os
import numbers
from gi.repository import GLib
import cairo
import six
from .canvas.colors import FLOWGRAPH_BACKGROUND_COLOR
from . import Constants
def get_rotated_coordinate(coor, rotation):
"""
Rotate the coordinate by the given rotation.
Args:
coor: the coordinate x, y tuple
rotation: the angle in degrees
Returns:
the rotated coordinates
"""
# handles negative angles
rotation = (rotation + 360) % 360
if rotation not in Constants.POSSIBLE_ROTATIONS:
        raise ValueError('unusable rotation angle "%s"' % str(rotation))
# determine the number of degrees to rotate
cos_r, sin_r = {
0: (1, 0), 90: (0, 1), 180: (-1, 0), 270: (0, -1),
}[rotation]
x, y = coor
return x * cos_r + y * sin_r, -x * sin_r + y * cos_r
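# Example: under this transform a 90 degree rotation maps the point (1, 0)
# onto (0, -1), consistent with screen-style (y-down) coordinates:
#   get_rotated_coordinate((1, 0), 90)  ->  (0, -1)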
def get_angle_from_coordinates(p1, p2):
"""
Given two points, calculate the vector direction from point1 to point2, directions are multiples of 90 degrees.
Args:
(x1,y1): the coordinate of point 1
(x2,y2): the coordinate of point 2
Returns:
the direction in degrees
"""
(x1, y1) = p1
(x2, y2) = p2
if y1 == y2: # 0 or 180
return 0 if x2 > x1 else 180
else: # 90 or 270
return 270 if y2 > y1 else 90
def align_to_grid(coor, mode=round):
def align(value):
return int(mode(value / (1.0 * Constants.CANVAS_GRID_SIZE)) * Constants.CANVAS_GRID_SIZE)
try:
return [align(c) for c in coor]
    except TypeError:
        return align(coor)
def num_to_str(num):
""" Display logic for numbers """
def eng_notation(value, fmt='g'):
"""Convert a number to a string in engineering notation. E.g., 5e-9 -> 5n"""
template = '{:' + fmt + '}{}'
magnitude = abs(value)
for exp, symbol in zip(range(9, -15-1, -3), 'GMk munpf'):
factor = 10 ** exp
if magnitude >= factor:
return template.format(value / factor, symbol.strip())
return template.format(value, '')
if isinstance(num, numbers.Complex):
num = complex(num) # Cast to python complex
if num == 0:
return '0'
output = eng_notation(num.real) if num.real else ''
output += eng_notation(num.imag, '+g' if output else 'g') + 'j' if num.imag else ''
return output
else:
return str(num)
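# A few sample conversions (sketch): num_to_str(5e-9) -> '5n' and
# num_to_str(2.5e6) -> '2.5M'; complex values render both parts,
# e.g. num_to_str(1 + 2j) -> '1+2j'.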
def encode(value):
"""Make sure that we pass only valid utf-8 strings into markup_escape_text.
Older versions of glib seg fault if the last byte starts a multi-byte
character.
"""
if six.PY2:
valid_utf8 = value.decode('utf-8', errors='replace').encode('utf-8')
else:
valid_utf8 = value
return GLib.markup_escape_text(valid_utf8)
def make_screenshot(flow_graph, file_path, transparent_bg=False):
if not file_path:
return
x_min, y_min, x_max, y_max = flow_graph.get_extents()
padding = Constants.CANVAS_GRID_SIZE
width = x_max - x_min + 2 * padding
height = y_max - y_min + 2 * padding
if file_path.endswith('.png'):
psurf = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
elif file_path.endswith('.pdf'):
psurf = cairo.PDFSurface(file_path, width, height)
elif file_path.endswith('.svg'):
psurf = cairo.SVGSurface(file_path, width, height)
else:
raise ValueError('Unknown file format')
cr = cairo.Context(psurf)
if not transparent_bg:
cr.set_source_rgba(*FLOWGRAPH_BACKGROUND_COLOR)
cr.rectangle(0, 0, width, height)
cr.fill()
cr.translate(padding - x_min, padding - y_min)
flow_graph.create_labels(cr)
flow_graph.create_shapes()
flow_graph.draw(cr)
if file_path.endswith('.png'):
psurf.write_to_png(file_path)
if file_path.endswith('.pdf') or file_path.endswith('.svg'):
cr.show_page()
psurf.finish()
def scale(coor, reverse=False):
factor = Constants.DPI_SCALING if not reverse else 1 / Constants.DPI_SCALING
return tuple(int(x * factor) for x in coor)
def scale_scalar(coor, reverse=False):
factor = Constants.DPI_SCALING if not reverse else 1 / Constants.DPI_SCALING
return int(coor * factor)
def get_modifier_key(angle_brackets=False):
"""
Get the modifier key based on platform.
Args:
angle_brackets: if return the modifier key with <> or not
Returns:
return the string with the modifier key
"""
if platform == "darwin":
if angle_brackets:
return "<Meta>"
else:
return "Meta"
else:
if angle_brackets:
return "<Ctrl>"
else:
return "Ctrl"
_nproc = None
def get_cmake_nproc():
""" Get number of cmake processes for C++ flowgraphs """
global _nproc # Cached result
if _nproc:
return _nproc
try:
# See https://docs.python.org/3.8/library/os.html#os.cpu_count
_nproc = len(os.sched_getaffinity(0))
except:
_nproc = os.cpu_count()
if not _nproc:
_nproc = 1
_nproc = max(_nproc//2 - 1, 1)
return _nproc
| trabucayre/gnuradio | grc/gui/Utils.py | Python | gpl-3.0 | 5,321 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Version information for Invenio-Utils.
This file is imported by ``invenio_utils.__init__``,
and parsed by ``setup.py``.
"""
__version__ = "0.1.0"
| hachreak/invenio-utils | invenio_utils/version.py | Python | gpl-2.0 | 1,123 |
#############################################################################
##
## Copyright (C) 2014 Digia Plc and/or its subsidiary(-ies).
## Contact: http://www.qt-project.org/legal
##
## This file is part of Qt Creator.
##
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Digia. For licensing terms and
## conditions see http://qt.digia.com/licensing. For further information
## use the contact form at http://qt.digia.com/contact-us.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Digia gives you certain additional
## rights. These rights are described in the Digia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
#############################################################################
source("../../shared/qtcreator.py")
# test context sensitive help in edit mode
# place cursor to <lineText> keyword, in <editorArea>, and verify help to contain <helpText>
def verifyInteractiveQMLHelp(lineText, helpText):
editorArea = waitForObject(":Qt Creator_QmlJSEditor::QmlJSTextEditorWidget")
# go to the specified word
placeCursorToLine(editorArea, lineText)
    homeKey = "<Home>"
    if platform.system() == "Darwin":
        homeKey = "<Ctrl+Left>"
    type(editorArea, homeKey)
# call help
type(editorArea, "<F1>")
test.verify(helpText in getHelpTitle(),
"Verifying if help is opened with documentation for '%s'." % helpText)
def main():
global sdkPath
startApplication("qtcreator" + SettingsPath)
if not startedWithoutPluginError():
return
addHelpDocumentation([os.path.join(sdkPath, "Documentation", "qt.qch")])
# create qt quick application
createNewQtQuickApplication(tempDir(), "SampleApp")
# verify Rectangle help
verifyInteractiveQMLHelp("Rectangle {", "QML Rectangle Element")
# go back to edit mode
switchViewTo(ViewConstants.EDIT)
# verify MouseArea help
verifyInteractiveQMLHelp("MouseArea {", "QML MouseArea Element")
# exit
invokeMenuItem("File","Exit")
| maui-packages/qt-creator | tests/system/suite_HELP/tst_HELP05/test.py | Python | lgpl-2.1 | 2,786 |
# -*- coding: utf-8 -*-
##############################################################################
#
# GNU Health: The Free Health and Hospital Information System
# Copyright (C) 2008-2016 Luis Falcon <[email protected]>
# Copyright (C) 2011-2016 GNU Solidario <[email protected]>
#
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from trytond.pool import Pool
from .health_crypto import *
def register():
Pool.register(
PatientPrescriptionOrder,
BirthCertificate,
DeathCertificate,
PatientEvaluation,
module='health_crypto', type_='model')
| kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/modules/health_crypto/__init__.py | Python | gpl-3.0 | 1,303 |
#!/usr/bin/env python
# PyQt tutorial 7
import sys
from PySide import QtCore, QtGui
class LCDRange(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
lcd = QtGui.QLCDNumber(2)
self.slider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.slider.setRange(0, 99)
self.slider.setValue(0)
self.connect(self.slider, QtCore.SIGNAL("valueChanged(int)"),
lcd, QtCore.SLOT("display(int)"))
self.connect(self.slider, QtCore.SIGNAL("valueChanged(int)"),
self, QtCore.SIGNAL("valueChanged(int)"))
layout = QtGui.QVBoxLayout()
layout.addWidget(lcd)
layout.addWidget(self.slider)
self.setLayout(layout)
def value(self):
return self.slider.value()
def setValue(self, value):
self.slider.setValue(value)
class MyWidget(QtGui.QWidget):
def __init__(self, parent=None):
QtGui.QWidget.__init__(self, parent)
quit = QtGui.QPushButton("Quit")
quit.setFont(QtGui.QFont("Times", 18, QtGui.QFont.Bold))
self.connect(quit, QtCore.SIGNAL("clicked()"),
QtGui.qApp, QtCore.SLOT("quit()"))
grid = QtGui.QGridLayout()
previousRange = None
layout = QtGui.QVBoxLayout()
layout.addWidget(quit)
layout.addLayout(grid)
self.setLayout(layout)
for row in range(3):
for column in range(3):
lcdRange = LCDRange()
grid.addWidget(lcdRange, row, column)
if previousRange:
self.connect(lcdRange, QtCore.SIGNAL("valueChanged(int)"),
previousRange.setValue)
previousRange = lcdRange
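                # Each new LCDRange's valueChanged signal drives the
                # previously created widget's setValue slot, so changing one
                # cell cascades its value back through all earlier cells.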
app = QtGui.QApplication(sys.argv)
widget = MyWidget()
widget.show()
sys.exit(app.exec_())
| cherry-wb/SideTools | examples/tutorial/t7.py | Python | apache-2.0 | 1,953 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update encrypted deploy password in Travis config file
"""
from __future__ import print_function
import base64
import json
import os
from getpass import getpass
import yaml
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
try:
from urllib import urlopen
except ImportError:
from urllib.request import urlopen
GITHUB_REPO = 'martensm/pywaifu'
TRAVIS_CONFIG_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '.travis.yml')
def load_key(pubkey):
"""Load public RSA key, with work-around for keys using
incorrect header/footer format.
Read more about RSA encryption with cryptography:
https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/
"""
try:
return load_pem_public_key(pubkey.encode(), default_backend())
except ValueError:
# workaround for https://github.com/travis-ci/travis-api/issues/196
pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END')
return load_pem_public_key(pubkey.encode(), default_backend())
def encrypt(pubkey, password):
"""Encrypt password using given RSA public key and encode it with base64.
The encrypted password can only be decrypted by someone with the
private key (in this case, only Travis).
"""
key = load_key(pubkey)
encrypted_password = key.encrypt(password, PKCS1v15())
return base64.b64encode(encrypted_password)
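# Hypothetical usage (repo name and password are placeholders):
#   token = encrypt(fetch_public_key('user/repo'), 'hunter2'.encode())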
def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read().decode())
if 'key' not in data:
errmsg = "Could not find public key for repo: {}.\n".format(repo)
errmsg += "Have you already added your GitHub repo to Travis?"
raise ValueError(errmsg)
return data['key']
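# The endpoint answers with JSON shaped like {"key": "-----BEGIN PUBLIC..."}
# (inferred from the 'key' lookup above), so callers get back the PEM text.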
def prepend_line(filepath, line):
"""Rewrite a file adding a line to its beginning.
"""
with open(filepath) as f:
lines = f.readlines()
lines.insert(0, line)
with open(filepath, 'w') as f:
f.writelines(lines)
def load_yaml_config(filepath):
with open(filepath) as f:
return yaml.load(f)
def save_yaml_config(filepath, config):
with open(filepath, 'w') as f:
yaml.dump(config, f, default_flow_style=False)
def update_travis_deploy_password(encrypted_password):
"""Update the deploy section of the .travis.yml file
to use the given encrypted password.
"""
config = load_yaml_config(TRAVIS_CONFIG_FILE)
config['deploy']['password'] = dict(secure=encrypted_password)
save_yaml_config(TRAVIS_CONFIG_FILE, config)
line = ('# This file was autogenerated and will overwrite'
' each time you run travis_pypi_setup.py\n')
prepend_line(TRAVIS_CONFIG_FILE, line)
def main(args):
public_key = fetch_public_key(args.repo)
password = args.password or getpass('PyPI password: ')
update_travis_deploy_password(encrypt(public_key, password.encode()))
print("Wrote encrypted password to .travis.yml -- you're ready to deploy")
if '__main__' == __name__:
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--repo', default=GITHUB_REPO,
help='GitHub repo (default: %s)' % GITHUB_REPO)
parser.add_argument('--password',
help='PyPI password (will prompt if not provided)')
args = parser.parse_args()
main(args)
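# A minimal usage sketch (hypothetical password value; --repo and --password
# are the real flags defined above):
#
#   python travis_pypi_setup.py --repo martensm/pywaifu --password hunter2
#
# This fetches the repo's public RSA key from the Travis API, encrypts the
# password with PKCS#1 v1.5, base64-encodes it, and rewrites the
# deploy.password.secure entry in .travis.yml.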
| martensm/pywaifu | travis_pypi_setup.py | Python | mit | 3,754 |
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simulates carbon-cache after loading the carbon plugin.
This file is required because there is no way to load a plugin from
the carbon configuration, such as what
https://github.com/graphite-project/carbon/pull/139
implements.
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
from biggraphite import utils as bg_utils
def main(_executable=sys.argv[0], _sys_path=sys.path):
"""The entry point of this module."""
bg_utils.manipulate_paths_like_upstream(_executable, _sys_path)
from carbon import util as carbon_util
from carbon import exceptions as carbon_exceptions
# Importing the plugin registers it.
from biggraphite.plugins import carbon as unused_carbon # noqa
if 'twisted.internet.reactor' in sys.modules:
del sys.modules['twisted.internet.reactor']
try:
        # The carbon code tries to guess GRAPHITE_ROOT from the filename
        # given to run_twistd_plugin(). This is then used to set up the
        # default paths. Try to make it somewhat compatible when carbon
        # is installed in its default directory.
bg_utils.setup_graphite_root_path(carbon_util.__file__)
carbon_util.run_twistd_plugin("carbon-cache")
except carbon_exceptions.CarbonConfigException as exc:
# This is what carbon cache does, we preserve that behaviour.
raise SystemExit(str(exc))
if __name__ == "__main__":
main()
| criteo/biggraphite | biggraphite/cli/bg_carbon_cache.py | Python | apache-2.0 | 2,020 |
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.utils.encoding import force_text
# these must be directly imported; this module is imported by `models.orders`
from shoop.core.models.counters import Counter, CounterType
from shoop.utils.importing import load
def calc_reference_number_checksum(rn):
muls = (7, 3, 1)
s = 0
for i, ch in enumerate(rn[::-1]):
s += muls[i % 3] * int(ch)
s = 10 - (s % 10)
return force_text(s)[-1]
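# Worked example (illustrative): for rn = "1234" the reversed digits
# 4, 3, 2, 1 take weights 7, 3, 1, 7, so s = 28 + 9 + 2 + 7 = 46, and
# 10 - (46 % 10) = 4 gives check digit "4". When s % 10 == 0 the
# intermediate value is 10, which force_text(10)[-1] maps to "0".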
def get_unique_reference_number(order):
now = datetime.datetime.now()
dt = "%012s%07d%04d" % (now.strftime("%y%m%d%H%M%S"), now.microsecond * 1000000, order.pk % 1000)
return dt + calc_reference_number_checksum(dt)
def get_running_reference_number(order):
value = Counter.get_and_increment(CounterType.ORDER_REFERENCE)
prefix = settings.SHOOP_REFERENCE_NUMBER_PREFIX
padded_value = force_text(value).rjust(settings.SHOOP_REFERENCE_NUMBER_LENGTH - len(prefix), "0")
reference_no = "%s%s" % (prefix, padded_value)
return reference_no + calc_reference_number_checksum(reference_no)
def get_shop_running_reference_number(order):
value = Counter.get_and_increment(CounterType.ORDER_REFERENCE)
prefix = "%06d" % order.shop.pk
padded_value = force_text(value).rjust(settings.SHOOP_REFERENCE_NUMBER_LENGTH - len(prefix), "0")
reference_no = "%s%s" % (prefix, padded_value)
return reference_no + calc_reference_number_checksum(reference_no)
def get_reference_number(order):
if order.reference_number:
raise ValueError("Order passed to get_reference_number() already has a reference number")
reference_number_method = settings.SHOOP_REFERENCE_NUMBER_METHOD
if reference_number_method == "unique":
return get_unique_reference_number(order)
elif reference_number_method == "running":
return get_running_reference_number(order)
elif reference_number_method == "shop_running":
return get_shop_running_reference_number(order)
elif callable(reference_number_method):
return reference_number_method(order)
else:
getter = load(reference_number_method, "Reference number generator")
return getter(order)
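# Illustrative settings (assumed values for the setting names used above):
# with SHOOP_REFERENCE_NUMBER_METHOD = "running",
# SHOOP_REFERENCE_NUMBER_PREFIX = "10" and SHOOP_REFERENCE_NUMBER_LENGTH = 10,
# a counter value of 42 produces "10" + "00000042" plus a check digit.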
def get_order_identifier(order):
if order.identifier:
raise ValueError("Order passed to get_order_identifier() already has an identifier")
order_identifier_method = settings.SHOOP_ORDER_IDENTIFIER_METHOD
if order_identifier_method == "id":
return force_text(order.id)
elif callable(order_identifier_method):
return order_identifier_method(order)
else:
getter = load(order_identifier_method, "Order identifier generator")
return getter(order)
| lawzou/shoop | shoop/core/utils/reference.py | Python | agpl-3.0 | 2,988 |
from __future__ import print_function
import sys, os, time
from h2o.exceptions import H2OTypeError
sys.path.insert(1, os.path.join("..","..",".."))
import h2o
from tests import pyunit_utils
from h2o.automl import H2OAutoML
"""
These tests check time constraints on AutoML runs.
"""
def import_dataset(seed=0, larger=False):
df = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/{}".format("prostate_complete.csv.zip" if larger else "prostate.csv")))
target = "CAPSULE"
df[target] = df[target].asfactor()
#Split frames
fr = df.split_frame(ratios=[.8,.1], seed=seed)
#Set up train, validation, and test sets
return dict(train=fr[0], valid=fr[1], test=fr[2], target=target, target_idx=1)
def test_automl_stops_after_max_runtime_secs():
print("Check that automl gets interrupted after `max_runtime_secs`")
max_runtime_secs = 30
    cancel_tolerance_secs = 5+5  # should work for most cases given the current mechanism; +5 due to SE, which currently ignores max_runtime_secs
ds = import_dataset()
aml = H2OAutoML(project_name="py_aml_max_runtime_secs", seed=1, max_runtime_secs=max_runtime_secs)
start = time.time()
aml.train(y=ds['target'], training_frame=ds['train'])
end = time.time()
assert abs(end-start - max_runtime_secs) < cancel_tolerance_secs, end-start
def test_no_model_takes_more_than_max_runtime_secs_per_model():
print("Check that individual model get interrupted after `max_runtime_secs_per_model`")
ds = import_dataset(seed=1, larger=True)
max_runtime_secs = 30
models_count = {}
for max_runtime_secs_per_model in [0, 3, max_runtime_secs]:
aml = H2OAutoML(project_name="py_aml_max_runtime_secs_per_model_{}".format(max_runtime_secs_per_model), seed=1,
max_runtime_secs_per_model=max_runtime_secs_per_model,
max_runtime_secs=max_runtime_secs)
aml.train(y=ds['target'], training_frame=ds['train'])
models_count[max_runtime_secs_per_model] = len(aml.leaderboard)
# print(aml.leaderboard)
# there may be one model difference as reproducibility is not perfectly guaranteed in time-bound runs
assert abs(models_count[0] - models_count[max_runtime_secs]) <= 1
assert abs(models_count[0] - models_count[3]) > 1
# TODO: add assertions about single model timing once 'automl event_log' is available on client side
pyunit_utils.run_tests([
test_automl_stops_after_max_runtime_secs,
test_no_model_takes_more_than_max_runtime_secs_per_model,
])
| michalkurka/h2o-3 | h2o-py/tests/testdir_algos/automl/pyunit_NOPASS_automl_timing.py | Python | apache-2.0 | 2,546 |
"""
Parses a variety of ``Accept-*`` headers.
These headers generally take the form of::
value1; q=0.5, value2; q=0
Where the ``q`` parameter is optional. In theory other parameters
exist, but this module ignores them.
"""
import re
try:
sorted
except NameError:
from webob.compat import sorted
part_re = re.compile(
r',\s*([^\s;,\n]+)(?:[^,]*?;\s*q=([0-9.]*))?')
def parse_accept(value):
"""
Parses an ``Accept-*`` style header.
A list of ``[(value, quality), ...]`` is returned. ``quality``
will be 1 if it was not given.
"""
result = []
for match in part_re.finditer(','+value):
name = match.group(1)
if name == 'q':
continue
quality = match.group(2) or ''
if not quality:
quality = 1
else:
try:
quality = max(min(float(quality), 1), 0)
except ValueError:
quality = 1
result.append((name, quality))
return result
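# Illustrative: parse_accept('text/html, application/xml; q=0.9, */*; q=0.1')
# returns [('text/html', 1), ('application/xml', 0.9), ('*/*', 0.1)];
# an absent or malformed q parameter falls back to quality 1.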
class Accept(object):
"""
Represents a generic ``Accept-*`` style header.
This object should not be modified. To add items you can use
``accept_obj + 'accept_thing'`` to get a new object
"""
def __init__(self, header_name, header_value):
self.header_name = header_name
self.header_value = header_value
self._parsed = parse_accept(header_value)
def __repr__(self):
return '<%s at %x %s: %s>' % (
self.__class__.__name__,
abs(id(self)),
self.header_name, str(self))
def __str__(self):
result = []
for match, quality in self._parsed:
if quality != 1:
match = '%s;q=%0.1f' % (match, quality)
result.append(match)
return ', '.join(result)
# FIXME: should subtraction be allowed?
def __add__(self, other, reversed=False):
if isinstance(other, Accept):
other = other.header_value
if hasattr(other, 'items'):
other = sorted(other.items(), key=lambda item: -item[1])
if isinstance(other, (list, tuple)):
result = []
for item in other:
if isinstance(item, (list, tuple)):
name, quality = item
result.append('%s; q=%s' % (name, quality))
else:
result.append(item)
other = ', '.join(result)
other = str(other)
my_value = self.header_value
if reversed:
other, my_value = my_value, other
if not other:
new_value = my_value
elif not my_value:
new_value = other
else:
new_value = my_value + ', ' + other
return self.__class__(self.header_name, new_value)
def __radd__(self, other):
return self.__add__(other, True)
def __contains__(self, match):
"""
Returns true if the given object is listed in the accepted
types.
"""
for item, quality in self._parsed:
if self._match(item, match):
return True
def quality(self, match):
"""
Return the quality of the given match. Returns None if there
is no match (not 0).
"""
for item, quality in self._parsed:
if self._match(item, match):
return quality
return None
def first_match(self, matches):
"""
        Returns the first match in the sequence of matches that is
allowed. Ignores quality. Returns the first item if nothing
else matches; or if you include None at the end of the match
list then that will be returned.
"""
if not matches:
raise ValueError(
"You must pass in a non-empty list")
for match in matches:
for item, quality in self._parsed:
if self._match(item, match):
return match
if match is None:
return None
return matches[0]
def best_match(self, matches, default_match=None):
"""
Returns the best match in the sequence of matches.
The sequence can be a simple sequence, or you can have
``(match, server_quality)`` items in the sequence. If you
have these tuples then the client quality is multiplied by the
server_quality to get a total.
default_match (default None) is returned if there is no intersection.
"""
best_quality = -1
best_match = default_match
for match_item in matches:
if isinstance(match_item, (tuple, list)):
match, server_quality = match_item
else:
match = match_item
server_quality = 1
for item, quality in self._parsed:
possible_quality = server_quality * quality
if possible_quality < best_quality:
continue
if self._match(item, match):
best_quality = possible_quality
best_match = match
return best_match
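    # Illustrative: with header_value 'text/html;q=0.5, text/plain',
    # best_match([('text/html', 1.0), ('text/plain', 0.4)]) compares
    # 0.5 * 1.0 against 1 * 0.4 and returns 'text/html'.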
def best_matches(self, fallback=None):
"""
Return all the matches in order of quality, with fallback (if
given) at the end.
"""
items = [
i for i, q in sorted(self._parsed, key=lambda iq: -iq[1])]
if fallback:
for index, item in enumerate(items):
if self._match(item, fallback):
items[index+1:] = []
break
else:
items.append(fallback)
return items
def _match(self, item, match):
return item.lower() == match.lower() or item == '*'
class NilAccept(object):
"""
Represents an Accept header with no value.
"""
MasterClass = Accept
def __init__(self, header_name):
self.header_name = header_name
def __repr__(self):
return '<%s for %s: %s>' % (
self.__class__.__name__, self.header_name, self.MasterClass)
def __str__(self):
return ''
def __add__(self, item):
if isinstance(item, self.MasterClass):
return item
else:
return self.MasterClass(self.header_name, '') + item
def __radd__(self, item):
if isinstance(item, self.MasterClass):
return item
else:
return item + self.MasterClass(self.header_name, '')
def __contains__(self, item):
return True
def quality(self, match, default_quality=1):
return 0
def first_match(self, matches):
return matches[0]
def best_match(self, matches, default_match=None):
best_quality = -1
best_match = default_match
for match_item in matches:
if isinstance(match_item, (list, tuple)):
match, quality = match_item
else:
match = match_item
quality = 1
if quality > best_quality:
best_match = match
best_quality = quality
return best_match
def best_matches(self, fallback=None):
if fallback:
return [fallback]
else:
return []
class NoAccept(NilAccept):
def __contains__(self, item):
return False
class MIMEAccept(Accept):
"""
Represents the ``Accept`` header, which is a list of mimetypes.
This class knows about mime wildcards, like ``image/*``
"""
def _match(self, item, match):
item = item.lower()
if item == '*':
item = '*/*'
match = match.lower()
if match == '*':
match = '*/*'
if '/' not in item:
# Bad, but we ignore
return False
if '/' not in match:
raise ValueError(
"MIME matches must include / (bad: %r)" % match)
item_major, item_minor = item.split('/', 1)
match_major, match_minor = match.split('/', 1)
if match_major == '*' and match_minor != '*':
raise ValueError(
"A MIME type of %r doesn't make sense" % match)
if item_major == '*' and item_minor != '*':
# Bad, but we ignore
return False
if ((item_major == '*' and item_minor == '*')
or (match_major == '*' and match_minor == '*')):
return True
if (item_major == match_major
and ((item_minor == '*' or match_minor == '*')
or item_minor == match_minor)):
return True
return False
def accept_html(self):
"""
Returns true if any HTML-like type is accepted
"""
return ('text/html' in self
or 'application/xhtml+xml' in self
or 'application/xml' in self
or 'text/xml' in self)
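# Illustrative: MIMEAccept('Accept', 'image/*;q=0.5, text/html') accepts
# 'image/png' at quality 0.5 through the wildcard minor type, and a bare
# '*' on either side of a match is normalized to '*/*'.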
class MIMENilAccept(NilAccept):
MasterClass = MIMEAccept
| sizzlelab/pysmsd | extras/webob/acceptparse.py | Python | mit | 8,996 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
from ....tests.helper import pytest
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
astropy_path = os.path.abspath(
os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
paths = [str(tmpdir), astropy_path]
if env.get('PYTHONPATH'):
paths.append(env.get('PYTHONPATH'))
env[str('PYTHONPATH')] = str(os.pathsep.join(paths))
# Build the extension
# This used to use subprocess.check_call, but on Python 3.4 there was
# a mysterious Heisenbug causing this to fail with a non-zero exit code
# *unless* the output is redirected. This bug also did not occur in an
# interactive session, so it likely had something to do with pytest's
# output capture
p = subprocess.Popen([sys.executable, 'setup.py', 'install',
'--install-lib={0}'.format(tmpdir),
astropy_path], cwd=setup_path, env=env,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    # Whether the process fails or not, this isn't likely to produce a great
    # deal of output, so communicate should be fine in almost all cases
stdout, stderr = p.communicate()
try:
stdout, stderr = stdout.decode('utf8'), stderr.decode('utf8')
except UnicodeDecodeError:
# Don't try to guess about encoding; just display the text
stdout, stderr = stdout.decode('latin1'), stderr.decode('latin1')
# If compilation fails, we can skip this test, since the
# dependencies necessary to compile an extension may be missing.
# If it passes, however, we want to continue and ensure that the
# extension created is actually usable. However, if we're on
# Travis-CI, or another generic continuous integration setup, we
# don't want to ever skip, because having it fail in that
# environment probably indicates something more serious that we
# want to know about.
if (not (str('CI') in os.environ or
str('TRAVIS') in os.environ or
str('CONTINUOUS_INTEGRATION') in os.environ) and
p.returncode):
pytest.skip("system unable to compile extensions")
return
assert p.returncode == 0, (
"setup.py exited with non-zero return code {0}\n"
"stdout:\n\n{1}\n\nstderr:\n\n{2}\n".format(
p.returncode, stdout, stderr))
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call([sys.executable, '-c', code], env=env)
| piotroxp/scibibscan | scib/lib/python3.5/site-packages/astropy/wcs/tests/extension/test_extension.py | Python | mit | 2,891 |
#!/usr/bin/env python
# Copyright 2015 Matthew Wall
# See the file LICENSE.txt for your rights.
#
# Credits:
# Thanks to Benji for the identification and decoding of 7 packet types
#
# Thanks to Eric G for posting USB captures and providing hardware for testing
# https://groups.google.com/forum/#!topic/weewx-development/5R1ahy2NFsk
#
# Thanks to Zahlii
# https://bsweather.myworkbook.de/category/weather-software/
#
# No thanks to oregon scientific - repeated requests for hardware and/or
# specifications resulted in no response at all.
# TODO: figure out battery level for each sensor
# TODO: figure out signal strength for each sensor
# TODO: figure out archive interval
# FIXME: figure out unknown bytes in history packet
# FIXME: decode the 0xdb packets
# FIXME: the read/write logic is rather brittle. it appears that communication
# must be initiated with an interrupt write. after that, the station will
# spew data. this implementation starts with a read, which will fail with
# a 'No data available' usb error. that results in an empty buffer (instead
# of an exception popping up the stack) so that a heartbeat write is sent.
# the genLoopPacket and genStartupRecords logic should be refactored to make
# this behavior explicit.
# FIXME: deal with initial usb timeout when starting usb communications
# FIXME: warn if altitude in pressure packet does not match weewx altitude
"""Driver for Oregon Scientific WMR300 weather stations.
Sensor data transmission frequencies:
wind: 2.5 to 3 seconds
TH: 10 to 12 seconds
rain: 20 to 24 seconds
The station supports 1 wind, 1 rain, 1 UV, and up to 8 temperature/humidity
sensors.
Sniffing USB traffic shows that all communication uses interrupt transfers.
The endpoint descriptors for the device show this as well; the reported
polling interval is 1.
The station ships with "Weather OS PRO" software for Windows. This was used
for the USB sniffing.
Internal observation names use the convention name_with_specifier. These are
mapped to the wview or other schema as needed with a configuration setting.
For example, for the wview schema, wind_speed maps to windSpeed, temperature_0
maps to inTemp, and humidity_1 maps to outHumidity.
Maximum value for the rain counter is 400 in (10160 mm) (40000 = 0x9c 0x40). The
counter does not wrap; it must be reset when it hits the maximum value, otherwise
rain data will not be recorded.
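(400 in * 25.4 mm/in = 10160 mm, the driver's MAX_RAIN_MM constant; the raw
counter holds hundredths of an inch, so the maximum is 40000 = 0x9c 0x40.)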
Message types -----------------------------------------------------------------
packet types from station:
57 - station type/model; history count
41 - ACK
D2 - history; 128 bytes
D3 - temperature/humidity/dewpoint/heatindex; 61 bytes
D4 - wind/windchill; 54 bytes
D5 - rain; 40 bytes
D6 - pressure; 46 bytes
DB - forecast; 32 bytes
DC - temperature/humidity ranges; 62 bytes
packet types from host:
A6 - heartbeat
41 - ACK
65 - ? each of these is ack-ed by the station
cd - ? start history request? last two bytes are one after most recent read
35 - ? finish history request? last two bytes are latest record count
72 - ?
73 - ?
notes:
WOP sends A6 message every 20 seconds
WOP requests history at startup, then again every 120 minutes
each A6 is followed by a 57 from the station
each data packet D* from the station is followed by an ack packet 41 from host
D2 (history) records are recorded every minute
D6 (pressure) packets seem to come every 15 minutes (900 seconds)
4,5 of 7x match 12,13 of 57
Message field decodings -------------------------------------------------------
Values are stored in 1 to 3 bytes in big endian order. Negative numbers are
stored as Two's Complement (if the first byte starts with F it is a negative
number).
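For example, the byte pair ff f6 decodes as ((0xff << 8) + 0xf6) - 0x10000
= -10, i.e. -1.0 C after scaling by 0.1; this matches the
min_dewpoint_last_month example in the temperature/humidity packet below.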
no data:
7f ff
values for channel number:
0 - console sensor
1 - sensor 1
2 - sensor 2
...
8 - sensor 8
values for trend:
0 - steady
1 - rising
2 - falling
bitwise transformation for compass direction:
1000 0000 0000 0000 = NNW
0100 0000 0000 0000 = NW
0010 0000 0000 0000 = WNW
0001 0000 0000 0000 = W
0000 1000 0000 0000 = WSW
0000 0100 0000 0000 = SW
0000 0010 0000 0000 = SSW
0000 0001 0000 0000 = S
0000 0000 1000 0000 = SSE
0000 0000 0100 0000 = SE
0000 0000 0010 0000 = ESE
0000 0000 0001 0000 = E
0000 0000 0000 1000 = ENE
0000 0000 0000 0100 = NE
0000 0000 0000 0010 = NNE
0000 0000 0000 0001 = N
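For example, the wind packet below carries compass bytes 00 03, which set
the N and NNE bits (0000 0000 0000 0011) and are shown as "N/NNE".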
values for forecast:
0x08 - cloudy
0x0c - rainy
0x1e - partly cloudy
0x0e - partly cloudy at night
0x70 - sunny
0x00 - clear night
Message decodings -------------------------------------------------------------
message: ACK
byte hex dec description decoded value
0 41 A acknowledgement ACK
1 43 C
2 4b K
3 73
4 e5
5 0a
6 26
7 0e
8 c1
examples:
41 43 4b 73 e5 0a 26 0e c1
41 43 4b 65 19 e5 04
message: station info
byte hex dec description decoded value
0 57 W station type WMR300
1 4d M
2 52 R
3 33 3
4 30 0
5 30 0
6 2c ,
7 41 A station model A002
8 30 0
9 30 0
10 32 2
11 2c ,
12 0e
13 c1
14 00
15 00
16 2c ,
17 67 latest history record 26391 (0x67*256 + 0x17)
18 17
19 2c ,
20 4b
21 2c ,
22 52
23 2c ,
examples:
57 4d 52 33 30 30 2c 41 30 30 32 2c 0e c1 00 00 2c 67 17 2c 4b 2c 52 2c
57 4d 52 33 30 30 2c 41 30 30 32 2c 88 8b 00 00 2c 2f b5 2c 4b 2c 52 2c
57 4d 52 33 30 30 2c 41 30 30 34 2c 0e c1 00 00 2c 7f e0 2c 4b 2c 49 2c
57 4d 52 33 30 30 2c 41 30 30 34 2c 88 8b 00 00 2c 7f e0 2c 4b 2c 49 2c
message: history
byte hex dec description decoded value
0 d2 packet type
1 80 128 packet length
2 31 count 12694
3 96
4 0f 15 year ee if not set
5 08 8 month ee if not set
6 0a 10 day ee if not set
7 06 6 hour
8 02 2 minute
9 00 temperature 0 21.7 C
10 d9
11 00 temperature 1 25.4 C
12 fe
13 7f temperature 2
14 ff
15 7f temperature 3
16 ff
17 7f temperature 4
18 ff
19 7f temperature 5
20 ff
21 7f temperature 6
22 ff
23 7f temperature 7
24 ff
25 7f temperature 8
26 ff (a*256 + b)/10
27 26 humidity 0 38 %
28 49 humidity 1 73 %
29 7f humidity 2
30 7f humidity 3
31 7f humidity 4
32 7f humidity 5
33 7f humidity 6
34 7f humidity 7
35 7f humidity 8
36 00 dewpoint 1 20.0 C
37 c8 (a*256 + b)/10
38 7f dewpoint 2
39 ff
40 7f dewpoint 3
41 ff
42 7f dewpoint 4
43 ff
44 7f dewpoint 5
45 ff
46 7f dewpoint 6
47 ff
48 7f dewpoint 7
49 ff
50 7f dewpoint 8
51 ff
52 7f heat index 1 C
53 fd (a*256 + b)/10
54 7f heat index 2
55 ff
56 7f heat index 3
57 ff
58 7f heat index 4
59 ff
60 7f heat index 5
61 ff
62 7f heat index 6
63 ff
64 7f heat index 7
65 ff
66 7f heat index 8
67 ff
68 7f wind chill C
69 fd (a*256 + b)/10
70 7f ?
71 ff ?
72 00 wind gust speed 0.0 m/s
73 00 (a*256 + b)/10
74 00 wind average speed 0.0 m/s
75 00 (a*256 + b)/10
76 01 wind gust direction 283 degrees
77 1b (a*256 + b)
78 01 wind average direction 283 degrees
79 1b (a*256 + b)
80 30 forecast
81 00 ?
82 00 ?
83 00 hourly rain hundredths_of_inch
84 00 (a*256 + b)
85 00 ?
86 00 accumulated rain hundredths_of_inch
87 03 (a*256 + b)
88 0f accumulated rain start year
89 07 accumulated rain start month
90 09 accumulated rain start day
91 13 accumulated rain start hour
92 09 accumulated rain start minute
93 00 rain rate hundredths_of_inch/hour
94 00 (a*256 + b)
95 26 pressure mbar
96 ab (a*256 + b)/10
97 01 pressure trend
98 7f ?
99 ff ?
100 7f ?
101 ff ?
102 7f ?
103 ff ?
104 7f ?
105 ff ?
106 7f ?
107 7f ?
108 7f ?
109 7f ?
110 7f ?
111 7f ?
112 7f ?
113 7f ?
114 ff ?
115 7f ?
116 ff ?
117 7f ?
118 ff ?
119 00 ?
120 00 ?
121 00 ?
122 00 ?
123 00 ?
124 00 ?
125 00 ?
126 f8 checksum
127 3b
message: temperature/humidity/dewpoint
byte hex dec description decoded value
0 D3 packet type
1 3D 61 packet length
2 0E 14 year
3 05 5 month
4 09 9 day
5 12 18 hour
6 14 20 minute
7 01 1 channel number
8 00 temperature 19.5 C
9 C3
10 2D humidity 45 %
11 00 dewpoint 7.0 C
12 46
13 7F heat index N/A
14 FD
15 00 temperature trend
16 00 humidity trend
17 0E 14 max_dewpoint_last_day year
18 05 5 month
19 09 9 day
20 0A 10 hour
21 24 36 minute
22 00 max_dewpoint_last_day 13.0 C
23 82
24 0E 14 min_dewpoint_last_day year
25 05 5 month
26 09 9 day
27 10 16 hour
28 1F 31 minute
29 00 min_dewpoint_last_day 6.0 C
30 3C
31 0E 14 max_dewpoint_last_month year
32 05 5 month
33 01 1 day
34 0F 15 hour
35 1B 27 minute
36 00 max_dewpoint_last_month 13.0 C
37 82
38 0E 14 min_dewpoint_last_month year
39 05 5 month
40 04 4 day
41 0B 11 hour
42 08 8 minute
43 FF min_dewpoint_last_month -1.0 C
44 F6
45 0E 14 max_heat_index year
46 05 5 month
47 09 9 day
48 00 0 hour
49 00 0 minute
50 7F max_heat_index N/A
51 FF
52 0E 14 min_heat_index year
53 05 5 month
54 01 1 day
55 00 0 hour
56 00 0 minute
57 7F min_heat_index N/A
58 FF
59 0B checksum
60 63
0 41 ACK
1 43
2 4B
3 D3 packet type
4 01 channel number
5 8B sometimes DF
examples:
41 43 4b d3 01 20
41 43 4b d3 00 20
message: wind
byte hex dec description decoded value
0 D4 packet type
1 36 54 packet length
2 0E 14 year
3 05 5 month
4 09 9 day
5 12 18 hour
6 14 20 minute
7 01 1 channel number
8 00 gust speed 1.4 m/s
9 0E
10 00 gust direction 168 degrees
11 A8
12 00 average speed 2.9 m/s
13 1D
14 00 average direction 13 degrees
15 0D
16 00 compass direction 3 N/NNE
17 03
18 7F windchill 32765 N/A
19 FD
20 0E 14 gust today year
21 05 5 month
22 09 9 day
23 10 16 hour
24 3B 59 minute
25 00 gust today 10 m/s
26 64
27 00 gust direction today 39 degree
28 27
29 0E 14 gust this month year
30 05 5 month
31 09 9 day
32 10 16 hour
33 3B 59 minute
34 00 gust this month 10 m/s
35 64
36 00 gust direction this month 39 degree
37 27
38 0E 14 wind chill today year
39 05 5 month
40 09 9 day
41 00 0 hour
42 00 0 minute
43 7F windchill today N/A
44 FF
45 0E 14 windchill this month year
46 05 5 month
47 03 3 day
48 09 9 hour
49 04 4 minute
50 00 windchill this month 2.9 C
51 1D
52 07 checksum
53 6A
0 41 ACK
1 43
2 4B
3 D4 packet type
4 01 channel number
5 8B
examples:
41 43 4b d4 01 20
41 43 4b d4 01 16
message: rain
byte hex dec description decoded value
0 D5 packet type
1 28 40 packet length
2 0E 14 year
3 05 5 month
4 09 9 day
5 12 18 hour
6 15 21 minute
7 01 1 channel number
8 00
9 00 rainfall this hour 0 inch
10 00
11 00
12 00 rainfall last 24 hours 0.12 inch
13 0C 12
14 00
15 00 rainfall accumulated 1.61 inch
16 A1 161
17 00 rainfall rate 0 inch/hr
18 00
19 0E 14 accumulated start year
20 04 4 month
21 1D 29 day
22 12 18 hour
23 00 0 minute
24 0E 14 max rate last 24 hours year
25 05 5 month
26 09 9 day
27 01 1 hour
28 0C 12 minute
29 00 0 max rate last 24 hours 0.11 inch/hr ((0x00<<8)+0x0b)/100.0
30 0B 11
31 0E 14 max rate last month year
32 05 5 month
33 02 2 day
34 04 4 hour
35 0C 12 minute
36 00 0 max rate last month 1.46 inch/hr ((0x00<<8)+0x92)/100.0
37 92 146
38 03 checksum 794 = (0x03<<8) + 0x1a
39 1A
0 41 ACK
1 43
2 4B
3 D5 packet type
4 01 channel number
5 8B
examples:
41 43 4b d5 01 20
41 43 4b d5 01 16
message: pressure
byte hex dec description decoded value
0 D6 packet type
1 2E 46 packet length
2 0E 14 year
3 05 5 month
4 0D 13 day
5 0E 14 hour
6 30 48 minute
7 00 0 channel number
8 26 station pressure 981.7 mbar ((0x26<<8)+0x59)/10.0
9 59
10 27 sea level pressure 1015.3 mbar ((0x27<<8)+0xa9)/10.0
11 A9
12 01 altitude meter 300 m (0x01<<8)+0x2c
13 2C
14 03 ?
15 00
16 0E 14 max pressure today year
17 05 5 max pressure today month
18 0D 13 max pressure today day
19 0C 12 max pressure today hour
20 33 51 max pressure today minute
21 27 max pressure today 1015.7 mbar
22 AD
23 0E 14 min pressure today year
24 05 5 min pressure today month
25 0D 13 min pressure today day
26 00 0 min pressure today hour
27 06 6 min pressure today minute
28 27 min pressure today 1014.1 mbar
29 9D
30 0E 14 max pressure month year
31 05 5 max pressure month month
32 04 4 max pressure month day
33 01 1 max pressure month hour
34 15 21 max pressure month minute
35 27 max pressure month 1022.5 mbar
36 F1
37 0E 14 min pressure month year
38 05 5 min pressure month month
39 0B 11 min pressure month day
40 00 0 min pressure month hour
41 06 6 min pressure month minute
42 27 min pressure month 1007.8 mbar
43 5E
44 06 checksum
45 EC
0 41 ACK
1 43
2 4B
3 D6 packet type
4 00 channel number
5 8B
examples:
41 43 4b d6 00 20
message: forecast
byte hex dec description decoded value
0 DB
1 20
2 0F 15 year
3 07 7 month
4 09 9 day
5 12 18 hour
6 23 35 minute
7 00
8 FA
9 79
10 FC
11 40
12 01
13 4A
14 06
15 17
16 14
17 23
18 06
19 01
20 00
21 00
22 01
23 01
24 01
25 00
26 00
27 00
28 FE
29 00
30 05 checksum
31 A5
0 41 ACK
1 43
2 4B
3 D6 packet type
4 00 channel number
5 20
examples:
41 43 4b db 00 20
message: temperature/humidity ranges
byte hex dec description decoded value
0 DC packet type
1 3E 62 packet length
2 0E 14 year
3 05 5 month
4 0D 13 day
5 0E 14 hour
6 30 48 minute
7 00 0 channel number
8 0E 14 max temp today year
9 05 5 month
10 0D 13 day
11 00 0 hour
12 00 0 minute
13 00 max temp today 20.8 C
14 D0
15 0E 14 min temp today year
16 05 5 month
17 0D 13 day
18 0B 11 hour
19 34 52 minute
20 00 min temp today 19.0 C
21 BE
22 0E 14 max temp month year
23 05 5 month
24 0A 10 day
25 0D 13 hour
26 19 25 minute
27 00 max temp month 21.4 C
28 D6
29 0E 14 min temp month year
30 05 5 month
31 04 4 day
32 03 3 hour
33 2A 42 minute
34 00 min temp month 18.1 C
35 B5
36 0E 14 max humidity today year
37 05 5 month
38 0D 13 day
39 05 5 hour
40 04 4 minute
41 45 max humidity today 69 %
42 0E 14 min humidity today year
43 05 5 month
44 0D 13 day
45 0B 11 hour
46 32 50 minute
47 41 min humidity today 65 %
48 0E 14 max humidity month year
49 05 5 month
50 0C 12 day
51 13 19 hour
52 32 50 minute
53 46 max humidity month 70 %
54 0E 14 min humidity month year
55 05 5 month
56 04 4 day
57 14 20 hour
58 0E 14 minute
59 39 min humidity month 57 %
60 07 checksum
61 BF
0 41 ACK
1 43
2 4B
3 DC packet type
4 00 0 channel number
5 8B
examples:
41 43 4b dc 01 20
41 43 4b dc 00 20
41 43 4b dc 01 16
41 43 4b dc 00 16
"""
from __future__ import with_statement
import syslog
import time
import usb
import weewx.drivers
import weewx.wxformulas
from weeutil.weeutil import timestamp_to_string
DRIVER_NAME = 'WMR300'
DRIVER_VERSION = '0.18'
DEBUG_COMM = 0
DEBUG_PACKET = 0
DEBUG_COUNTS = 0
DEBUG_DECODE = 0
DEBUG_HISTORY = 0
DEBUG_RAIN = 1
def loader(config_dict, _):
return WMR300Driver(**config_dict[DRIVER_NAME])
def confeditor_loader():
return WMR300ConfEditor()
def logmsg(level, msg):
syslog.syslog(level, 'wmr300: %s' % msg)
def logdbg(msg):
logmsg(syslog.LOG_DEBUG, msg)
def loginf(msg):
logmsg(syslog.LOG_INFO, msg)
def logerr(msg):
logmsg(syslog.LOG_ERR, msg)
def logcrt(msg):
logmsg(syslog.LOG_CRIT, msg)
def _fmt_bytes(data):
return ' '.join(['%02x' % x for x in data])
def _lo(x):
return x - 256 * (x >> 8)
def _hi(x):
return x >> 8
# pyusb 0.4.x does not provide an errno or strerror with the usb errors that
# it wraps into USBError. so we have to compare strings to figure out exactly
# what type of USBError we are dealing with. unfortunately, those strings are
# localized, so we must compare in every language.
KNOWN_USB_MESSAGES = [
'No data available', 'No error',
'Nessun dato disponibile', 'Nessun errore',
'Keine Daten verf',
'No hay datos disponibles',
'Pas de donn',
'Ingen data er tilgjengelige']
# these are the usb 'errors' that should be ignored
def known_usb_err(e):
errmsg = repr(e)
for msg in KNOWN_USB_MESSAGES:
if msg in errmsg:
return True
return False
def get_usb_info():
pyusb_version = '0.4.x'
try:
pyusb_version = usb.__version__
except AttributeError:
pass
return "pyusb_version=%s" % pyusb_version
class WMR300Driver(weewx.drivers.AbstractDevice):
"""weewx driver that communicates with a WMR300 weather station."""
# the default map is for the wview schema
DEFAULT_MAP = {
'pressure': 'pressure',
'barometer': 'barometer',
'windSpeed': 'wind_avg',
'windDir': 'wind_dir',
'windGust': 'wind_gust',
'windGustDir': 'wind_gust_dir',
'inTemp': 'temperature_0',
'outTemp': 'temperature_1',
'extraTemp1': 'temperature_2',
'extraTemp2': 'temperature_3',
'extraTemp3': 'temperature_4',
'extraTemp4': 'temperature_5',
'extraTemp5': 'temperature_6',
'extraTemp6': 'temperature_7',
'extraTemp7': 'temperature_8',
'inHumidity': 'humidity_0',
'outHumidity': 'humidity_1',
'extraHumid1': 'humidity_2',
'extraHumid2': 'humidity_3',
'extraHumid3': 'humidity_4',
'extraHumid4': 'humidity_5',
'extraHumid5': 'humidity_6',
'extraHumid6': 'humidity_7',
'extraHumid7': 'humidity_8',
'dewpoint': 'dewpoint_1',
'extraDewpoint1': 'dewpoint_2',
'extraDewpoint2': 'dewpoint_3',
'extraDewpoint3': 'dewpoint_4',
'extraDewpoint4': 'dewpoint_5',
'extraDewpoint5': 'dewpoint_6',
'extraDewpoint6': 'dewpoint_7',
'extraDewpoint7': 'dewpoint_8',
'heatindex': 'heatindex_1',
'extraHeatindex1': 'heatindex_2',
'extraHeatindex2': 'heatindex_3',
'extraHeatindex3': 'heatindex_4',
'extraHeatindex4': 'heatindex_5',
'extraHeatindex5': 'heatindex_6',
'extraHeatindex6': 'heatindex_7',
'extraHeatindex7': 'heatindex_8',
'windchill': 'windchill',
'rainRate': 'rain_rate'}
def __init__(self, **stn_dict):
loginf('driver version is %s' % DRIVER_VERSION)
loginf('usb info: %s' % get_usb_info())
self.model = stn_dict.get('model', 'WMR300')
self.sensor_map = dict(self.DEFAULT_MAP)
if 'sensor_map' in stn_dict:
self.sensor_map.update(stn_dict['sensor_map'])
loginf('sensor map is %s' % self.sensor_map)
self.heartbeat = 20 # how often to send a6 messages, in seconds
self.history_retry = 60 # how often to retry history, in seconds
global DEBUG_COMM
DEBUG_COMM = int(stn_dict.get('debug_comm', DEBUG_COMM))
global DEBUG_PACKET
DEBUG_PACKET = int(stn_dict.get('debug_packet', DEBUG_PACKET))
global DEBUG_COUNTS
DEBUG_COUNTS = int(stn_dict.get('debug_counts', DEBUG_COUNTS))
global DEBUG_DECODE
DEBUG_DECODE = int(stn_dict.get('debug_decode', DEBUG_DECODE))
global DEBUG_HISTORY
DEBUG_HISTORY = int(stn_dict.get('debug_history', DEBUG_HISTORY))
global DEBUG_RAIN
DEBUG_RAIN = int(stn_dict.get('debug_rain', DEBUG_RAIN))
self.last_rain = None
self.last_a6 = 0
self.last_65 = 0
self.last_7x = 0
self.last_record = 0
# FIXME: make the cache values age
# FIXME: do this generically so it can be used in other drivers
self.pressure_cache = dict()
self.station = Station()
self.station.open()
def closePort(self):
self.station.close()
self.station = None
@property
def hardware_name(self):
return self.model
def genLoopPackets(self):
while True:
try:
buf = self.station.read()
if buf:
pkt = Station.decode(buf)
if buf[0] in [0xd3, 0xd4, 0xd5, 0xd6, 0xdb, 0xdc]:
# send ack for most data packets
# FIXME: what is last number in the ACK?
# observed: 0x00 0x20 0xc1 0xc7 0xa0 0x99
cmd = [0x41, 0x43, 0x4b, buf[0], buf[7], _lo(self.last_record)]
self.station.write(cmd)
# we only care about packets with loop data
if pkt['packet_type'] in [0xd3, 0xd4, 0xd5, 0xd6]:
packet = self.convert_loop(pkt)
yield packet
if time.time() - self.last_a6 > self.heartbeat:
logdbg("request station status: %s (%02x)" %
(self.last_record, _lo(self.last_record)))
cmd = [0xa6, 0x91, 0xca, 0x45, 0x52, _lo(self.last_record)]
self.station.write(cmd)
self.last_a6 = time.time()
if self.last_7x == 0:
# FIXME: what are the 72/73 messages?
# observed:
# 73 e5 0a 26 0e c1
# 73 e5 0a 26 88 8b
# 72 a9 c1 60 52 00
# cmd = [0x72, 0xa9, 0xc1, 0x60, 0x52, 0x00]
cmd = [0x73, 0xe5, 0x0a, 0x26, 0x88, 0x8b]
# cmd = [0x73, 0xe5, 0x0a, 0x26, 0x0e, 0xc1]
self.station.write(cmd)
self.last_7x = time.time()
except usb.USBError, e:
if DEBUG_COMM:
logdbg("loop: "
"e.errno=%s e.strerror=%s e.message=%s repr=%s" %
(e.errno, e.strerror, e.message, repr(e)))
if not known_usb_err(e):
logerr("usb failure: %s" % e)
raise weewx.WeeWxIOError(e)
except (WrongLength, BadChecksum), e:
loginf(e)
time.sleep(0.001)
def genStartupRecords(self, since_ts):
loginf("reading records since %s" % timestamp_to_string(since_ts))
hbuf = None
last_ts = None
cnt = 0
while True:
try:
buf = self.station.read()
if buf:
if buf[0] == 0xd2:
hbuf = buf
buf = None
elif buf[0] == 0x7f and hbuf is not None:
# FIXME: need better indicator of second half history
buf = hbuf + buf
hbuf = None
if buf and buf[0] == 0xd2:
self.last_record = Station.get_record_index(buf)
ts = Station._extract_ts(buf[4:9])
if ts is not None and ts > since_ts:
keep = True if last_ts is not None else False
pkt = Station.decode(buf)
packet = self.convert_historical(pkt, ts, last_ts)
last_ts = ts
if keep:
logdbg("historical record: %s" % packet)
cnt += 1
yield packet
if buf and buf[0] == 0x57:
idx = Station.get_latest_index(buf)
msg = "count=%s last_index=%s latest_index=%s" % (
cnt, self.last_record, idx)
if self.last_record + 1 >= idx:
loginf("catchup complete: %s" % msg)
break
loginf("catchup in progress: %s" % msg)
if buf and buf[0] == 0x41 and buf[3] == 0x65:
nxtrec = Station.get_next_index(self.last_record)
logdbg("request records starting with %s" % nxtrec)
cmd = [0xcd, 0x18, 0x30, 0x62, _hi(nxtrec), _lo(nxtrec)]
self.station.write(cmd)
if time.time() - self.last_a6 > self.heartbeat:
logdbg("request station status: %s (%02x)" %
(self.last_record, _lo(self.last_record)))
cmd = [0xa6, 0x91, 0xca, 0x45, 0x52, _lo(self.last_record)]
self.station.write(cmd)
self.last_a6 = time.time()
if self.last_7x == 0:
# FIXME: what does 72/73 do?
cmd = [0x73, 0xe5, 0x0a, 0x26, 0x88, 0x8b]
self.station.write(cmd)
self.last_7x = time.time()
if time.time() - self.last_65 > self.history_retry:
logdbg("initiate record request: %s (%02x)" %
(self.last_record, _lo(self.last_record)))
cmd = [0x65, 0x19, 0xe5, 0x04, 0x52, _lo(self.last_record)]
self.station.write(cmd)
self.last_65 = time.time()
except usb.USBError, e:
if DEBUG_COMM:
logdbg("history: "
"e.errno=%s e.strerror=%s e.message=%s repr=%s" %
(e.errno, e.strerror, e.message, repr(e)))
if not known_usb_err(e):
logerr("usb failure: %s" % e)
raise weewx.WeeWxIOError(e)
except (WrongLength, BadChecksum), e:
loginf(e)
time.sleep(0.001)
def convert(self, pkt, ts):
# if debugging packets, log everything we got
if DEBUG_PACKET:
logdbg("raw packet: %s" % pkt)
# timestamp and unit system are the same no matter what
p = {'dateTime': ts, 'usUnits': weewx.METRICWX}
# map hardware names to the requested database schema names
for label in self.sensor_map:
if self.sensor_map[label] in pkt:
p[label] = pkt[self.sensor_map[label]]
# single variable to track last_rain assumes that any historical reads
# will happen before any loop reads, and no historical reads will
# happen after any loop reads. otherwise double-counting of rain
# events could happen.
if 'rain_total' in pkt:
p['rain'] = self.calculate_rain(pkt['rain_total'], self.last_rain)
if DEBUG_RAIN and pkt['rain_total'] != self.last_rain:
logdbg("rain=%s rain_total=%s last_rain=%s" %
(p['rain'], pkt['rain_total'], self.last_rain))
self.last_rain = pkt['rain_total']
if pkt['rain_total'] == Station.MAX_RAIN_MM:
loginf("rain counter maximum reached, counter reset required")
if DEBUG_PACKET:
logdbg("converted packet: %s" % p)
return p
def convert_historical(self, pkt, ts, last_ts):
p = self.convert(pkt, ts)
if last_ts is not None:
p['interval'] = ts - last_ts
return p
def convert_loop(self, pkt):
p = self.convert(pkt, int(time.time() + 0.5))
if 'pressure' in p:
# cache any pressure-related values
for x in ['pressure', 'barometer']:
self.pressure_cache[x] = p[x]
else:
# apply any cached pressure-related values
p.update(self.pressure_cache)
return p
@staticmethod
def calculate_rain(newtotal, oldtotal):
"""Calculate the rain difference given two cumulative measurements."""
if newtotal is not None and oldtotal is not None:
if newtotal >= oldtotal:
delta = newtotal - oldtotal
else:
loginf("rain counter decrement detected: new=%s old=%s" %
(newtotal, oldtotal))
delta = None
else:
loginf("possible missed rain event: new=%s old=%s" %
(newtotal, oldtotal))
delta = None
return delta
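    # Illustrative: calculate_rain(12.7, 10.2) yields the 2.5 mm difference;
    # a decrement such as calculate_rain(0.0, 10.2) logs the reset and
    # returns None, as does any comparison where either total is None.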
class WMR300Error(weewx.WeeWxIOError):
"""map station errors to weewx io errors"""
class WrongLength(WMR300Error):
"""bad packet length"""
class BadChecksum(WMR300Error):
"""bogus checksum"""
class Station(object):
# these identify the weather station on the USB
VENDOR_ID = 0x0FDE
PRODUCT_ID = 0xCA08
MESSAGE_LENGTH = 64
EP_IN = 0x81
EP_OUT = 0x01
MAX_RECORDS = 50000 # FIXME: what is maximum number of records?
MAX_RAIN_MM = 10160 # maximum value of rain counter, in mm
def __init__(self, vend_id=VENDOR_ID, prod_id=PRODUCT_ID):
self.vendor_id = vend_id
self.product_id = prod_id
self.handle = None
self.timeout = 100
self.interface = 0
self.recv_counts = dict()
self.send_counts = dict()
def __enter__(self):
self.open()
return self
def __exit__(self, _, value, traceback): # @UnusedVariable
self.close()
def open(self):
dev = self._find_dev(self.vendor_id, self.product_id)
if not dev:
raise WMR300Error("Unable to find station on USB: "
"cannot find device with "
"VendorID=0x%04x ProductID=0x%04x" %
(self.vendor_id, self.product_id))
self.handle = dev.open()
if not self.handle:
raise WMR300Error('Open USB device failed')
# FIXME: reset is actually a no-op for some versions of libusb/pyusb?
self.handle.reset()
# for HID devices on linux, be sure kernel does not claim the interface
try:
self.handle.detachKernelDriver(self.interface)
except (AttributeError, usb.USBError):
pass
# attempt to claim the interface
try:
self.handle.claimInterface(self.interface)
except usb.USBError, e:
self.close()
raise WMR300Error("Unable to claim interface %s: %s" %
(self.interface, e))
def close(self):
if self.handle is not None:
try:
self.handle.releaseInterface()
except (ValueError, usb.USBError), e:
logdbg("Release interface failed: %s" % e)
self.handle = None
def reset(self):
self.handle.reset()
def read(self, count=True):
buf = []
try:
buf = self.handle.interruptRead(
Station.EP_IN, self.MESSAGE_LENGTH, self.timeout)
if DEBUG_COMM:
logdbg("read: %s" % _fmt_bytes(buf))
if DEBUG_COUNTS and count:
self.update_count(buf, self.recv_counts)
except usb.USBError, e:
if DEBUG_COMM:
logdbg("read: e.errno=%s e.strerror=%s e.message=%s repr=%s" %
(e.errno, e.strerror, e.message, repr(e)))
if not known_usb_err(e):
raise
return buf
def write(self, buf):
if DEBUG_COMM:
logdbg("write: %s" % _fmt_bytes(buf))
# pad with zeros up to the standard message length
while len(buf) < self.MESSAGE_LENGTH:
buf.append(0x00)
sent = self.handle.interruptWrite(Station.EP_OUT, buf, self.timeout)
if DEBUG_COUNTS:
self.update_count(buf, self.send_counts)
return sent
# keep track of the message types for debugging purposes
@staticmethod
def update_count(buf, count_dict):
label = 'empty'
if buf and len(buf) > 0:
if buf[0] in [0xd3, 0xd4, 0xd5, 0xd6, 0xdb, 0xdc]:
# message type and channel for data packets
label = '%02x:%d' % (buf[0], buf[7])
elif (buf[0] in [0x41] and
buf[3] in [0xd3, 0xd4, 0xd5, 0xd6, 0xdb, 0xdc]):
# message type and channel for data ack packets
label = '%02x:%02x:%d' % (buf[0], buf[3], buf[4])
else:
# otherwise just track the message type
label = '%02x' % buf[0]
if label in count_dict:
count_dict[label] += 1
else:
count_dict[label] = 1
cstr = []
for k in sorted(count_dict):
cstr.append('%s: %s' % (k, count_dict[k]))
        logdbg('counts: %s' % '; '.join(cstr))
@staticmethod
def _find_dev(vendor_id, product_id):
"""Find the first device with vendor and product ID on the USB."""
for bus in usb.busses():
for dev in bus.devices:
if dev.idVendor == vendor_id and dev.idProduct == product_id:
logdbg('Found station at bus=%s device=%s' %
(bus.dirname, dev.filename))
return dev
return None
@staticmethod
def _verify_length(label, length, buf):
if buf[1] != length:
raise WrongLength("%s: wrong length: expected %02x, got %02x" %
(label, length, buf[1]))
@staticmethod
def _verify_checksum(label, buf, msb_first=True):
"""Calculate and compare checksum"""
try:
cs1 = Station._calc_checksum(buf)
cs2 = Station._extract_checksum(buf, msb_first)
if cs1 != cs2:
raise BadChecksum("%s: bad checksum: %04x != %04x" %
(label, cs1, cs2))
except IndexError, e:
raise BadChecksum("%s: not enough bytes for checksum: %s" %
(label, e))
@staticmethod
def _calc_checksum(buf):
cs = 0
for x in buf[:-2]:
cs += x
return cs
@staticmethod
def _extract_checksum(buf, msb_first):
if msb_first:
return (buf[-2] << 8) | buf[-1]
return (buf[-1] << 8) | buf[-2]
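    # Illustrative: in the D5 rain packet documented above, the trailing
    # bytes 03 1a encode checksum 794 = (0x03 << 8) + 0x1a, which must
    # equal the sum over buf[:-2] computed by _calc_checksum.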
@staticmethod
def _extract_ts(buf):
if buf[0] == 0xee and buf[1] == 0xee and buf[2] == 0xee:
# year, month, and day are 0xee when timestamp is unset
return None
try:
year = int(buf[0]) + 2000
month = int(buf[1])
day = int(buf[2])
hour = int(buf[3])
minute = int(buf[4])
return time.mktime((year, month, day, hour, minute, 0, -1, -1, -1))
except IndexError:
raise WMR300Error("buffer too short for timestamp")
except (OverflowError, ValueError), e:
raise WMR300Error(
"cannot create timestamp from y:%s m:%s d:%s H:%s M:%s: %s" %
(buf[0], buf[1], buf[2], buf[3], buf[4], e))
@staticmethod
def _extract_signed(hi, lo, m):
if hi == 0x7f:
return None
s = 0
if hi & 0xf0 == 0xf0:
s = 0x10000
return ((hi << 8) + lo - s) * m
@staticmethod
def _extract_value(buf, m):
if buf[0] == 0x7f:
return None
if len(buf) == 2:
return ((buf[0] << 8) + buf[1]) * m
return buf[0] * m
@staticmethod
def get_latest_index(buf):
# get the index of the most recent history record
if buf[0] != 0x57:
return None
return (buf[17] << 8) + buf[18]
@staticmethod
def get_next_index(n):
# return the index of the record after indicated index
if n == 0:
return 0x20
if n + 1 > Station.MAX_RECORDS:
return 0x20 # FIXME: verify the wraparound
return n + 1
@staticmethod
def get_record_index(buf):
# extract the index from the history record
if buf[0] != 0xd2:
return None
return (buf[2] << 8) + buf[3]
@staticmethod
def decode(buf):
try:
pkt = getattr(Station, '_decode_%02x' % buf[0])(buf)
if DEBUG_DECODE:
logdbg('decode: %s %s' % (_fmt_bytes(buf), pkt))
return pkt
except IndexError, e:
raise WMR300Error("cannot decode buffer: %s" % e)
except AttributeError:
raise WMR300Error("unknown packet type %02x: %s" %
(buf[0], _fmt_bytes(buf)))
@staticmethod
def _decode_57(buf):
"""57 packet contains station information"""
pkt = dict()
pkt['packet_type'] = 0x57
pkt['station_type'] = ''.join("%s" % chr(x) for x in buf[0:6])
pkt['station_model'] = ''.join("%s" % chr(x) for x in buf[7:11])
if DEBUG_HISTORY:
nrec = (buf[17] << 8) + buf[18]
logdbg("history records: %s" % nrec)
return pkt
@staticmethod
def _decode_41(_):
"""41 43 4b is ACK"""
pkt = dict()
pkt['packet_type'] = 0x41
return pkt
@staticmethod
def _decode_d2(buf):
"""D2 packet contains history data"""
Station._verify_length("D2", 0x80, buf)
Station._verify_checksum("D2", buf[:0x80], msb_first=False)
pkt = dict()
pkt['packet_type'] = 0xd2
pkt['ts'] = Station._extract_ts(buf[4:9])
for i in range(0, 9):
pkt['temperature_%d' % i] = Station._extract_signed(
buf[9 + 2 * i], buf[10 + 2 * i], 0.1) # C
pkt['humidity_%d' % i] = Station._extract_value(
buf[27 + i:28 + i], 1.0) # %
for i in range(1, 9):
pkt['dewpoint_%d' % i] = Station._extract_signed(
buf[36 + 2 * i], buf[37 + 2 * i], 0.1) # C
pkt['heatindex_%d' % i] = Station._extract_signed(
buf[52 + 2 * i], buf[53 + 2 * i], 0.1) # C
pkt['windchill'] = Station._extract_signed(buf[68], buf[69], 0.1) # C
pkt['wind_gust'] = Station._extract_value(buf[72:74], 0.1) # m/s
pkt['wind_avg'] = Station._extract_value(buf[74:76], 0.1) # m/s
pkt['wind_gust_dir'] = Station._extract_value(buf[76:78], 1.0) # degree
pkt['wind_dir'] = Station._extract_value(buf[78:80], 1.0) # degree
pkt['forecast'] = Station._extract_value(buf[80:81], 1.0)
pkt['rain_hour'] = Station._extract_value(buf[83:85], 0.254) # mm
pkt['rain_total'] = Station._extract_value(buf[86:88], 0.254) # mm
pkt['rain_start_dateTime'] = Station._extract_ts(buf[88:93])
pkt['rain_rate'] = Station._extract_value(buf[93:95], 0.254) # mm/hour
pkt['barometer'] = Station._extract_value(buf[95:97], 0.1) # mbar
pkt['pressure_trend'] = Station._extract_value(buf[97:98], 1.0)
return pkt
@staticmethod
def _decode_d3(buf):
"""D3 packet contains temperature/humidity data"""
Station._verify_length("D3", 0x3d, buf)
Station._verify_checksum("D3", buf[:0x3d])
pkt = dict()
pkt['packet_type'] = 0xd3
pkt['ts'] = Station._extract_ts(buf[2:7])
pkt['channel'] = buf[7]
pkt['temperature_%d' % pkt['channel']] = Station._extract_signed(
buf[8], buf[9], 0.1) # C
pkt['humidity_%d' % pkt['channel']] = Station._extract_value(
buf[10:11], 1.0) # %
pkt['dewpoint_%d' % pkt['channel']] = Station._extract_signed(
buf[11], buf[12], 0.1) # C
pkt['heatindex_%d' % pkt['channel']] = Station._extract_signed(
buf[13], buf[14], 0.1) # C
return pkt
@staticmethod
def _decode_d4(buf):
"""D4 packet contains wind data"""
Station._verify_length("D4", 0x36, buf)
Station._verify_checksum("D4", buf[:0x36])
pkt = dict()
pkt['packet_type'] = 0xd4
pkt['ts'] = Station._extract_ts(buf[2:7])
pkt['channel'] = buf[7]
pkt['wind_gust'] = Station._extract_value(buf[8:10], 0.1) # m/s
pkt['wind_gust_dir'] = Station._extract_value(buf[10:12], 1.0) # degree
pkt['wind_avg'] = Station._extract_value(buf[12:14], 0.1) # m/s
pkt['wind_dir'] = Station._extract_value(buf[14:16], 1.0) # degree
pkt['windchill'] = Station._extract_signed(buf[18], buf[19], 0.1) # C
return pkt
@staticmethod
def _decode_d5(buf):
"""D5 packet contains rain data"""
Station._verify_length("D5", 0x28, buf)
Station._verify_checksum("D5", buf[:0x28])
pkt = dict()
pkt['packet_type'] = 0xd5
pkt['ts'] = Station._extract_ts(buf[2:7])
pkt['channel'] = buf[7]
pkt['rain_hour'] = Station._extract_value(buf[9:11], 0.254) # mm
pkt['rain_24_hour'] = Station._extract_value(buf[12:14], 0.254) # mm
pkt['rain_total'] = Station._extract_value(buf[15:17], 0.254) # mm
pkt['rain_rate'] = Station._extract_value(buf[17:19], 0.254) # mm/hour
pkt['rain_start_dateTime'] = Station._extract_ts(buf[19:24])
return pkt
@staticmethod
def _decode_d6(buf):
"""D6 packet contains pressure data"""
Station._verify_length("D6", 0x2e, buf)
Station._verify_checksum("D6", buf[:0x2e])
pkt = dict()
pkt['packet_type'] = 0xd6
pkt['ts'] = Station._extract_ts(buf[2:7])
pkt['channel'] = buf[7]
pkt['pressure'] = Station._extract_value(buf[8:10], 0.1) # mbar
pkt['barometer'] = Station._extract_value(buf[10:12], 0.1) # mbar
pkt['altitude'] = Station._extract_value(buf[12:14], 1.0) # meter
return pkt
@staticmethod
def _decode_dc(buf):
"""DC packet contains temperature/humidity range data"""
Station._verify_length("DC", 0x3e, buf)
Station._verify_checksum("DC", buf[:0x3e])
pkt = dict()
pkt['packet_type'] = 0xdc
pkt['ts'] = Station._extract_ts(buf[2:7])
return pkt
@staticmethod
def _decode_db(buf):
"""DB packet is forecast"""
Station._verify_length("DB", 0x20, buf)
Station._verify_checksum("DB", buf[:0x20])
pkt = dict()
pkt['packet_type'] = 0xdb
return pkt
class WMR300ConfEditor(weewx.drivers.AbstractConfEditor):
@property
def default_stanza(self):
return """
[WMR300]
# This section is for WMR300 weather stations.
# The station model, e.g., WMR300A
model = WMR300
# The driver to use:
driver = weewx.drivers.wmr300
"""
def modify_config(self, config_dict):
print """
Setting rainRate, windchill, heatindex, and dewpoint calculations to hardware."""
config_dict.setdefault('StdWXCalculate', {})
config_dict['StdWXCalculate'].setdefault('Calculations', {})
config_dict['StdWXCalculate']['Calculations']['rainRate'] = 'hardware'
config_dict['StdWXCalculate']['Calculations']['windchill'] = 'hardware'
config_dict['StdWXCalculate']['Calculations']['heatindex'] = 'hardware'
config_dict['StdWXCalculate']['Calculations']['dewpoint'] = 'hardware'
# define a main entry point for basic testing of the station without weewx
# engine and service overhead. invoke this as follows from the weewx root dir:
#
# PYTHONPATH=bin python bin/user/wmr300.py
if __name__ == '__main__':
import optparse
usage = """%prog [options] [--help]"""
syslog.openlog('wmr300', syslog.LOG_PID | syslog.LOG_CONS)
syslog.setlogmask(syslog.LOG_UPTO(syslog.LOG_DEBUG))
parser = optparse.OptionParser(usage=usage)
parser.add_option('--version', dest='version', action='store_true',
help='display driver version')
(options, args) = parser.parse_args()
if options.version:
print "wmr300 driver version %s" % DRIVER_VERSION
exit(0)
driver_dict = {
'debug_comm': 1,
'debug_packet': 0,
'debug_counts': 1,
'debug_decode': 0}
stn = WMR300Driver(**driver_dict)
for packet in stn.genLoopPackets():
print packet
| paolobenve/weewx | bin/weewx/drivers/wmr300.py | Python | gpl-3.0 | 46,206 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (C) 2012-2019 British Crown (Met Office) & Contributors.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
"""Suite engine processor management."""
from metomi.isodatetime.data import Duration
from metomi.isodatetime.parsers import DurationParser, ISO8601SyntaxError
from glob import glob
import os
import pwd
import re
from metomi.rose.date import RoseDateTimeOperator, OffsetValueError
from metomi.rose.fs_util import FileSystemUtil
from metomi.rose.host_select import HostSelector
from metomi.rose.popen import RosePopener
from metomi.rose.reporter import Event
from metomi.rose.resource import ResourceLocator
from metomi.rose.scheme_handler import SchemeHandlersManager
import sys
import webbrowser
class NoSuiteLogError(Exception):
"""An exception raised on a missing suite log."""
def __str__(self):
user_name, suite_name = self.args[0:2]
arg = suite_name
if user_name:
arg += " ~" + user_name
return "%s: suite log not found" % arg
class WebBrowserEvent(Event):
"""An event raised when a web browser is launched."""
LEVEL = Event.V
def __init__(self, *args):
Event.__init__(self, *args)
self.browser, self.url = args
def __str__(self):
return "%s %s" % self.args
class BaseCycleOffset(object):
"""Represent a cycle time offset."""
def to_duration(self):
"""Convert to a Duration."""
raise NotImplementedError()
class OldFormatCycleOffset(BaseCycleOffset):
"""Represent a cycle time offset, back compat syntax."""
KEYS = {"W": ("days", 7),
"D": ("days", 1),
"H": ("hours", 1),
"M": ("minutes", 1)}
REC_TEXT = re.compile(r"\A"
r"(?P<sign>__)?"
r"(?P<is_time>T)?"
r"(?P<amount>\d+)"
r"(?P<unit>(?(is_time)[SMH]|[DW]))?"
r"\Z")
SIGN_DEFAULT = ""
def __init__(self, offset_text):
"""Parse offset_text into a Duration-convertible form.
Expect offset_text in this format:
* A __ double underscore denotes an offset to the future.
Otherwise, it is an offset to the past.
* For the rest:
nW denotes n weeks.
n or nD denotes n days.
Tn or TnH denotes n hours.
TnM denotes n minutes.
TnS denotes n seconds.
"""
BaseCycleOffset.__init__(self)
match = self.REC_TEXT.match(offset_text.upper())
if not match:
raise CycleOffsetError(offset_text)
self.is_time = match.group("is_time")
if self.is_time is None:
self.is_time = ""
self.sign = match.group("sign")
if not self.sign:
self.sign = self.SIGN_DEFAULT
self.amount = int(match.group("amount"))
self.unit = match.group("unit")
if not self.unit:
if self.is_time:
self.unit = "H"
else:
self.unit = "D"
def __str__(self):
return "%s%s%d%s" % (self.sign, self.is_time, self.amount, self.unit)
def to_duration(self):
"""Convert to a Duration."""
date_time_unit, multiplier = self.KEYS[self.unit]
amount = self.amount
if self.sign == self.SIGN_DEFAULT: # negative
amount = -amount
return Duration(**{date_time_unit: multiplier * amount})
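    # Illustrative: OldFormatCycleOffset("T6H").to_duration() is a Duration
    # of -6 hours (no "__" prefix means an offset into the past), while
    # OldFormatCycleOffset("__2D").to_duration() is +2 days.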
class ISOCycleOffset(BaseCycleOffset):
"""Represent a cycle time offset, ISO8601 syntax."""
def __init__(self, offset_text):
"""Parse offset_text into a Duration-convertible form.
Expect offset_text in this format:
* A __ double underscore denotes an offset to the future.
Otherwise, it is an offset to the past.
* For the rest, use an ISO 8601 compatible duration.
"""
BaseCycleOffset.__init__(self)
if offset_text.startswith("__"):
self.sign_factor = 1
else:
self.sign_factor = -1
self.duration = DurationParser().parse(offset_text)
self.duration *= self.sign_factor
def __str__(self):
duration_str = str(self.duration)
if duration_str.startswith("-"):
return duration_str[1:]
return "__" + duration_str
def to_duration(self):
"""Convert to a Duration."""
return self.duration
class SuiteEngineGlobalConfCompatError(Exception):
"""An exception raised on incompatible global configuration."""
def __str__(self):
engine, key, value = self.args
return ("%s global configuration incompatible to Rose: %s=%s" %
(engine, key, value))
class SuiteNotRunningError(Exception):
"""An exception raised when a suite is not running."""
def __str__(self):
return "%s: does not appear to be running" % (self.args)
class SuiteStillRunningError(Exception):
"""An exception raised when a suite is still running."""
FMT_HEAD = "Suite \"%(suite_name)s\" appears to be running:\n"
def __str__(self):
suite_name, extras = self.args
return self.FMT_HEAD % {"suite_name": suite_name} + "".join(extras)
class CycleOffsetError(ValueError):
"""Unrecognised cycle time offset format."""
def __str__(self):
return self.args[0] + ": unrecognised cycle time offset format."
class CycleTimeError(ValueError):
"""Unrecognised cycle time format."""
def __str__(self):
return self.args[0] + ": unrecognised cycle time format."
class CyclingModeError(ValueError):
"""Unrecognised cycling mode."""
def __str__(self):
return self.args[0] + ": unrecognised cycling mode."
class TaskProps(object):
"""Task properties.
suite_name: name of the suite
suite_dir_rel: path to suite directory relative to $HOME
suite_dir: path to suite directory
task_id: task ID, may contain both the name and the cycle time
task_name: task name
task_prefix: prefix in task name (optional)
task_suffix: suffix in task name (optional)
cycling_mode: type of cycling used in the suite
task_cycle_time: task cycle time
task_log_root: path to the task log without file extension
task_is_cold_start: string "true" for a cold start task
dir_data: path to suite data directory
dir_data_cycle: path to suite data directory in this cycle time
dir_data_cycle_offsets: dict of time offsets: paths to suite data directory
dir_etc: path to etc directory
"""
ATTRS = {"suite_name": "ROSE_SUITE_NAME",
"suite_dir_rel": "ROSE_SUITE_DIR_REL",
"suite_dir": "ROSE_SUITE_DIR",
"task_id": "ROSE_TASK_ID",
"task_name": "ROSE_TASK_NAME",
"task_prefix": "ROSE_TASK_PREFIX",
"task_suffix": "ROSE_TASK_SUFFIX",
"cycling_mode": "ROSE_CYCLING_MODE",
"task_cycle_time": "ROSE_TASK_CYCLE_TIME",
"task_log_dir": "ROSE_TASK_LOG_DIR",
"task_log_root": "ROSE_TASK_LOG_ROOT",
"task_is_cold_start": "ROSE_TASK_IS_COLD_START",
"dir_data": "ROSE_DATA",
"dir_data_cycle": "ROSE_DATAC",
"dir_data_cycle_offsets": "ROSE_DATAC%s",
"dir_etc": "ROSE_ETC"}
def __init__(self, **kwargs):
for attr_key, env_key in self.ATTRS.items():
if kwargs.get(attr_key) is not None:
setattr(self, attr_key, kwargs.get(attr_key))
elif env_key.endswith("%s"):
setattr(self, attr_key, {})
prefix = env_key.replace("%s", "")
for key, value in os.environ.items():
if key == prefix or not key.startswith(prefix):
continue
try:
cycle_offset = get_cycle_offset(
key.replace(prefix, ""))
except ValueError:
continue
getattr(self, attr_key)[cycle_offset] = value
elif os.getenv(env_key) is not None:
setattr(self, attr_key, os.getenv(env_key))
else:
setattr(self, attr_key, None)
def __iter__(self):
for attr_key, env_key in sorted(self.ATTRS.items()):
attr_value = getattr(self, attr_key)
if attr_value is not None:
if isinstance(attr_value, dict):
for key, value in attr_value.items():
yield (env_key % key, str(value))
else:
yield (env_key, str(attr_value))
def __str__(self):
ret = ""
for name, value in self:
if value is not None:
ret += "%s=%s\n" % (name, str(value))
return ret
class SuiteEngineProcessor(object):
"""An abstract suite engine processor."""
TASK_NAME_DELIM = {"prefix": "_", "suffix": "_"}
SCHEME = None
SCHEME_HANDLER_MANAGER = None
SCHEME_DEFAULT = "cylc" # TODO: site configuration?
TIMEOUT = 5 # seconds
@classmethod
def get_processor(cls, key=None, event_handler=None, popen=None,
fs_util=None, host_selector=None):
"""Return a processor for the suite engine named by "key"."""
if cls.SCHEME_HANDLER_MANAGER is None:
path = os.path.dirname(
os.path.dirname(sys.modules["metomi.rose"].__file__))
cls.SCHEME_HANDLER_MANAGER = SchemeHandlersManager(
[path], ns="rose.suite_engine_procs", attrs=["SCHEME"],
can_handle=None, event_handler=event_handler, popen=popen,
fs_util=fs_util, host_selector=host_selector)
if key is None:
key = cls.SCHEME_DEFAULT
x = cls.SCHEME_HANDLER_MANAGER.get_handler(key)
return x
def __init__(self, event_handler=None, popen=None, fs_util=None,
host_selector=None, **_):
self.event_handler = event_handler
if popen is None:
popen = RosePopener(event_handler)
self.popen = popen
if fs_util is None:
fs_util = FileSystemUtil(event_handler)
self.fs_util = fs_util
if host_selector is None:
host_selector = HostSelector(event_handler, popen)
self.host_selector = host_selector
self.date_time_oper = RoseDateTimeOperator()
def check_global_conf_compat(self):
"""Raise exception on suite engine specific incompatibity.
Should raise SuiteEngineGlobalConfCompatError.
"""
raise NotImplementedError()
def check_suite_not_running(self, suite_name):
"""Check that suite is not running.
        This method is not implemented here; sub-classes should override it.
Arguments:
suite_name: name of suite to check.
Raise:
SuiteStillRunningError:
Should raise SuiteStillRunningError if suite is still running.
"""
raise NotImplementedError()
def cmp_suite_conf(
self, suite_name, run_mode, strict_mode=False, debug_mode=False):
"""Compare current suite configuration with that in the previous run.
An implementation of this method should:
* Raise an exception on failure.
        * Return True if the suite configuration is unmodified compared
          with the previous run.
* Return False otherwise.
"""
raise NotImplementedError()
def get_suite_contact(self, suite_name):
"""Return suite contact information for a user suite.
Return (dict): suite contact information.
"""
raise NotImplementedError()
def get_suite_dir(self, suite_name, *paths):
"""Return the path to the suite running directory.
paths -- if specified, are added to the end of the path.
"""
return os.path.join(os.path.expanduser("~"),
self.get_suite_dir_rel(suite_name, *paths))
def get_suite_dir_rel(self, suite_name, *paths):
"""Return the relative path to the suite running directory.
paths -- if specified, are added to the end of the path.
"""
raise NotImplementedError()
def get_suite_log_url(self, user_name, suite_name):
"""Return the "rose bush" URL for a user's suite."""
prefix = "~"
if user_name:
prefix += user_name
suite_d = os.path.join(prefix, self.get_suite_dir_rel(suite_name))
suite_d = os.path.expanduser(suite_d)
if not os.path.isdir(suite_d):
raise NoSuiteLogError(user_name, suite_name)
rose_bush_url = None
for f_name in glob(os.path.expanduser("~/.metomi/rose-bush*.status")):
status = {}
for line in open(f_name):
key, value = line.strip().split("=", 1)
status[key] = value
if status.get("host"):
rose_bush_url = "http://" + status["host"]
if status.get("port"):
rose_bush_url += ":" + status["port"]
rose_bush_url += "/"
break
if not rose_bush_url:
conf = ResourceLocator.default().get_conf()
rose_bush_url = conf.get_value(
["rose-suite-log", "rose-bush"])
if not rose_bush_url:
return "file://" + suite_d
if not rose_bush_url.endswith("/"):
rose_bush_url += "/"
if not user_name:
user_name = pwd.getpwuid(os.getuid()).pw_name
return rose_bush_url + "/".join(
["taskjobs", user_name, suite_name])
def get_task_auth(self, suite_name, task_name):
"""Return [user@]host for a remote task in a suite."""
raise NotImplementedError()
def get_tasks_auths(self, suite_name):
"""Return a list of [user@]host for remote tasks in a suite."""
raise NotImplementedError()
def get_task_props(self, *args, **kwargs):
"""Return a TaskProps object containing suite task's attributes."""
calendar_mode = self.date_time_oper.get_calendar_mode()
try:
return self._get_task_props(*args, **kwargs)
finally:
# Restore calendar mode if changed
self.date_time_oper.set_calendar_mode(calendar_mode)
def _get_task_props(self, *_, **kwargs):
"""Helper for get_task_props."""
tprops = TaskProps()
# If suite_name and task_id are defined, we can assume that the rest
# are defined as well.
if tprops.suite_name is not None and tprops.task_id is not None:
return tprops
tprops = self.get_task_props_from_env()
# Modify calendar mode, if possible
self.date_time_oper.set_calendar_mode(tprops.cycling_mode)
if kwargs["cycle"] is not None:
try:
cycle_offset = get_cycle_offset(kwargs["cycle"])
except ISO8601SyntaxError:
tprops.task_cycle_time = kwargs["cycle"]
else:
if tprops.task_cycle_time:
tprops.task_cycle_time = self._get_offset_cycle_time(
tprops.task_cycle_time, cycle_offset)
else:
tprops.task_cycle_time = kwargs["cycle"]
# Etc directory
if os.path.exists(os.path.join(tprops.suite_dir, "etc")):
tprops.dir_etc = os.path.join(tprops.suite_dir, "etc")
# Data directory: generic, current cycle, and previous cycle
tprops.dir_data = os.path.join(tprops.suite_dir, "share", "data")
if tprops.task_cycle_time is not None:
task_cycle_time = tprops.task_cycle_time
tprops.dir_data_cycle = os.path.join(
tprops.suite_dir, "share", "cycle", str(task_cycle_time))
# Offset cycles
if kwargs.get("cycle_offsets"):
cycle_offset_strings = []
for value in kwargs.get("cycle_offsets"):
cycle_offset_strings.extend(value.split(","))
for value in cycle_offset_strings:
if tprops.cycling_mode == "integer":
cycle_offset = value
if cycle_offset.startswith("__"):
sign_factor = 1
else:
sign_factor = -1
offset_val = cycle_offset.replace("__", "")
cycle_time = str(
int(task_cycle_time) +
sign_factor * int(offset_val.replace("P", "")))
else:
cycle_offset = get_cycle_offset(value)
cycle_time = self._get_offset_cycle_time(
task_cycle_time, cycle_offset)
tprops.dir_data_cycle_offsets[str(cycle_offset)] = (
os.path.join(
tprops.suite_dir, "share", "cycle", cycle_time))
# Create data directories if necessary
# Note: should we create the offsets directories?
for dir_ in (
[tprops.dir_data, tprops.dir_data_cycle] +
list(tprops.dir_data_cycle_offsets.values())):
if dir_ is None:
continue
if os.path.exists(dir_) and not os.path.isdir(dir_):
self.fs_util.delete(dir_)
self.fs_util.makedirs(dir_)
# Task prefix and suffix
for key, split, index in [("prefix", str.split, 0),
("suffix", str.rsplit, 1)]:
delim = self.TASK_NAME_DELIM[key]
if kwargs.get(key + "_delim"):
delim = kwargs.get(key + "_delim")
if delim in tprops.task_name:
res = split(tprops.task_name, delim, 1)
setattr(tprops, "task_" + key, res[index])
return tprops
def get_task_props_from_env(self):
"""Return a TaskProps object.
This method should not be used directly. Call get_task_props() instead.
"""
raise NotImplementedError()
def get_version(self):
"""Return the version string of the suite engine."""
raise NotImplementedError()
def get_version_env_name(self):
"""Return the name of the suite engine version environment variable."""
return self.SCHEME.upper() + "_VERSION"
def handle_event(self, *args, **kwargs):
"""Call self.event_handler if it is callable."""
if callable(self.event_handler):
return self.event_handler(*args, **kwargs)
def is_suite_registered(self, suite_name):
"""Return whether or not a suite is registered."""
raise NotImplementedError()
def job_logs_archive(self, suite_name, items):
"""Archive cycle job logs.
suite_name -- The name of a suite.
items -- A list of relevant items.
"""
raise NotImplementedError()
def job_logs_pull_remote(self, suite_name, items,
prune_remote_mode=False, force_mode=False):
"""Pull and housekeep the job logs on remote task hosts.
suite_name -- The name of a suite.
items -- A list of relevant items.
prune_remote_mode -- Remove remote job logs after pulling them.
force_mode -- Force retrieval, even if it may not be necessary.
"""
raise NotImplementedError()
def job_logs_remove_on_server(self, suite_name, items):
"""Remove cycle job logs.
suite_name -- The name of a suite.
items -- A list of relevant items.
"""
raise NotImplementedError()
def launch_suite_log_browser(self, user_name, suite_name):
"""Launch web browser to view suite log.
Return URL of suite log on success, None otherwise.
"""
url = self.get_suite_log_url(user_name, suite_name)
browser = webbrowser.get()
browser.open(url, new=True, autoraise=True)
self.handle_event(WebBrowserEvent(browser.name, url))
return url
def parse_job_log_rel_path(self, f_name):
"""Return (cycle, task, submit_num, ext) for a job log rel path."""
raise NotImplementedError()
def run(self, suite_name, host=None, run_mode=None, args=None):
"""Start a suite (in a specified host)."""
raise NotImplementedError()
def shutdown(self, suite_name, args=None, stderr=None, stdout=None):
"""Shut down the suite."""
raise NotImplementedError()
def _get_offset_cycle_time(self, cycle, cycle_offset):
"""Return the actual date time of an BaseCycleOffset against cycle.
cycle: a YYYYmmddHH or ISO 8601 date/time string.
cycle_offset: an instance of BaseCycleOffset.
Return date time in the same format as cycle.
        Note: It would be desirable to switch to an ISO 8601 format,
but due to Cylc's YYYYmmddHH format, it would be too confusing to do so
at the moment.
"""
offset_str = str(cycle_offset.to_duration())
try:
time_point, parse_format = self.date_time_oper.date_parse(cycle)
time_point = self.date_time_oper.date_shift(time_point, offset_str)
return self.date_time_oper.date_format(parse_format, time_point)
except OffsetValueError:
raise
except ValueError:
raise CycleTimeError(cycle)
def get_cycle_offset(offset_text):
"""Return the correct BaseCycleOffset type for offset_text."""
try:
cycle_offset = OldFormatCycleOffset(offset_text)
except CycleOffsetError:
cycle_offset = ISOCycleOffset(offset_text)
return cycle_offset
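# Illustrative usage sketch: both offset syntaxes are accepted, with the
# old Rose syntax tried first.
#
#     get_cycle_offset("T12")    # old syntax: 12 hours into the past
#     get_cycle_offset("PT12H")  # ISO 8601 duration: 12 hours into the past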
| benfitzpatrick/rose | metomi/rose/suite_engine_proc.py | Python | gpl-3.0 | 22,663 |
#!/usr/bin/python
"""
Daylight savings dates.
Copyright (c) 2015 Kauinoa
License: MIT
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of
the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
from . import util
def get_start_dst(year):
"""
Get the start of daylight savings.
2nd Sunday in March.
"""
return util.get_date_in_month(year, util.MAR, util.SUN, 2)
def get_end_dst(year):
"""
Get the end of daylight savings.
1st Sunday in November.
"""
return util.get_date_in_month(year, util.NOV, util.SUN, 1)
holidays = {
"Day Light Savings' Starts": get_start_dst,
"Day Light Savings' Ends": get_end_dst
}
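# Usage sketch (illustrative; assumes util.get_date_in_month returns a
# datetime.date):
#
#     for name, get_date in holidays.items():
#         print("%s: %s" % (name, get_date(2015)))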
| kauinoa/CalendarEvents | calendarevents/dst_holidays.py | Python | mit | 1,664 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
from configman.dotdict import DotDict
import isodate
import pytest
from socorro.lib import BadArgumentError, external_common
class TestExternalCommon(object):
"""Test functions of the external_common module. """
def test_check_type(self):
# Test 1: null
param = None
datatype = "datetime"
res = external_common.check_type(param, datatype)
assert res is None
# Test 2: integer
param = 12
datatype = "int"
res = external_common.check_type(param, datatype)
assert res == param
# Test 3: integer
param = "12"
datatype = "int"
res = external_common.check_type(param, datatype)
assert res == 12
# Test 4: string
param = datetime.datetime(2012, 1, 1)
datatype = "str"
res = external_common.check_type(param, datatype)
assert res == "2012-01-01 00:00:00"
# Test 5: boolean
param = 1
datatype = "bool"
res = external_common.check_type(param, datatype)
assert res is True
# Test 6: boolean
param = "T"
datatype = "bool"
res = external_common.check_type(param, datatype)
assert res is True
# Test 7: boolean
param = 14
datatype = "bool"
res = external_common.check_type(param, datatype)
assert res is False
# Test 8: datetime
param = "2012-01-01T00:00:00"
datatype = "datetime"
res = external_common.check_type(param, datatype)
assert isinstance(res, datetime.datetime)
assert res.year == 2012
assert res.month == 1
assert res.hour == 0
# Test 9: timedelta
param = "72"
datatype = "timedelta"
res = external_common.check_type(param, datatype)
assert isinstance(res, datetime.timedelta)
assert res.days == 3
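        # (the bare string "72" is interpreted as a number of hours,
        # so 72 hours == 3 days)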
# Test: date
param = "2012-01-01"
datatype = "date"
res = external_common.check_type(param, datatype)
assert isinstance(res, datetime.date)
assert res.year == 2012
assert res.month == 1
assert res.day == 1
def test_parse_arguments_old_way(self):
"""Test external_common.parse_arguments(). """
filters = [
("param1", "default", ["list", "str"]),
("param2", None, "int"),
("param3", ["list", "of", 4, "values"], ["list", "str"]),
]
arguments = {"param1": "value1", "unknown": 12345}
params_exp = DotDict()
params_exp.param1 = ["value1"]
params_exp.param2 = None
params_exp.param3 = ["list", "of", "4", "values"]
params = external_common.parse_arguments(filters, arguments, modern=False)
assert params == params_exp
def test_parse_arguments(self):
"""Test external_common.parse_arguments(). """
filters = [
("param1", "default", [str]),
("param2", None, int),
("param3", ["some", "default", "list"], [str]),
("param4", ["list", "of", 4, "values"], [str]),
("param5", None, bool),
("param6", None, datetime.date),
("param7", None, datetime.date),
("param8", None, datetime.datetime),
("param9", None, [str]),
]
arguments = {
"param1": "value1",
"unknown": 12345,
"param5": "true",
"param7": datetime.date(2016, 2, 9).isoformat(),
"param8": datetime.datetime(2016, 2, 9).isoformat(),
# note the 'param9' is deliberately not specified.
}
params_exp = DotDict()
params_exp.param1 = ["value1"]
params_exp.param2 = None
params_exp.param3 = ["some", "default", "list"]
params_exp.param4 = ["list", "of", "4", "values"]
params_exp.param5 = True
params_exp.param6 = None
params_exp.param7 = datetime.date(2016, 2, 9)
params_exp.param8 = datetime.datetime(2016, 2, 9).replace(tzinfo=isodate.UTC)
params_exp.param9 = None
params = external_common.parse_arguments(filters, arguments, modern=True)
for key in params:
assert params[key] == params_exp[key]
assert params == params_exp
def test_parse_arguments_with_class_validators(self):
class NumberConverter(object):
def clean(self, value):
conv = {"one": 1, "two": 2, "three": 3}
try:
return conv[value]
except KeyError:
raise ValueError("No idea?!")
# Define a set of filters with some types being non-trivial types
# but instead a custom validator.
filters = [("param1", 0, NumberConverter())]
arguments = {"param1": "one"}
params_exp = DotDict()
params_exp.param1 = 1
params = external_common.parse_arguments(filters, arguments, modern=True)
assert params == params_exp
# note that a ValueError becomes a BadArgumentError
arguments = {"param1": "will cause a ValueError in NumberConverter.clean"}
with pytest.raises(BadArgumentError):
external_common.parse_arguments(filters, arguments, modern=True)
| mozilla/socorro | socorro/unittest/lib/test_external_common.py | Python | mpl-2.0 | 5,520 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui\dialog_error.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog_error(object):
def setupUi(self, Dialog_error):
Dialog_error.setObjectName(_fromUtf8("Dialog_error"))
Dialog_error.resize(255, 120)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog_error.sizePolicy().hasHeightForWidth())
Dialog_error.setSizePolicy(sizePolicy)
Dialog_error.setMaximumSize(QtCore.QSize(640, 480))
Dialog_error.setStyleSheet(_fromUtf8("QWidget{\n"
" background-color: rgb(67, 67, 67);\n"
" color: rgb(255, 255, 255);\n"
"}"))
self.verticalLayout = QtGui.QVBoxLayout(Dialog_error)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Dialog_error)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.buttonBox = QtGui.QDialogButtonBox(Dialog_error)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog_error)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog_error.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog_error.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog_error)
def retranslateUi(self, Dialog_error):
pass
| KatonaLab/vividstorm | views/dialog_error.py | Python | lgpl-3.0 | 2,356 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.Dataset.shard()`."""
from absl.testing import parameterized
from tensorflow.python.data.experimental.ops import random_access
from tensorflow.python.data.kernel_tests import checkpoint_test_base
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import combinations
from tensorflow.python.framework import errors
from tensorflow.python.platform import test
class ShardTest(test_base.DatasetTestBase, parameterized.TestCase):
@combinations.generate(test_base.default_test_combinations())
def testSimpleCase(self):
dataset = dataset_ops.Dataset.range(10).shard(5, 2)
self.assertDatasetProduces(dataset, expected_output=[2, 7])
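  # Illustrative note: shard(num_shards=n, index=i) keeps the elements
  # whose position p in the input satisfies p % n == i, which is why
  # range(10).shard(5, 2) yields [2, 7] above.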
@combinations.generate(test_base.default_test_combinations())
def testNestedData(self):
dataset_a = dataset_ops.Dataset.range(10)
dataset_b = dataset_ops.Dataset.range(10, 0, -1)
dataset = dataset_ops.Dataset.zip((dataset_a, dataset_b)).shard(5, 2)
self.assertDatasetProduces(dataset, expected_output=[(2, 8), (7, 3)])
@combinations.generate(test_base.default_test_combinations())
def testOffsetZero(self):
dataset = dataset_ops.Dataset.range(10).shard(5, 0)
self.assertDatasetProduces(dataset, expected_output=[0, 5])
@combinations.generate(test_base.default_test_combinations())
def testOffsetGreaterNumShards(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = dataset_ops.Dataset.range(10).shard(5, 7)
self.evaluate(self.getNext(dataset)())
@combinations.generate(test_base.default_test_combinations())
def testNegativeOffset(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = dataset_ops.Dataset.range(10).shard(5, -3)
self.evaluate(self.getNext(dataset)())
@combinations.generate(test_base.default_test_combinations())
def testNegativeNumShards(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = dataset_ops.Dataset.range(10).shard(-3, 1)
self.evaluate(self.getNext(dataset)())
@combinations.generate(test_base.default_test_combinations())
def testZeroNumShards(self):
with self.assertRaises(errors.InvalidArgumentError):
dataset = dataset_ops.Dataset.range(10).shard(0, 1)
self.evaluate(self.getNext(dataset)())
@combinations.generate(test_base.default_test_combinations())
def testIteratorEndsBeforeFirstElem(self):
dataset = dataset_ops.Dataset.range(1).shard(5, 2)
self.assertDatasetProduces(dataset, expected_output=[])
@combinations.generate(test_base.default_test_combinations())
def testLargerWorkerPool(self):
dataset = dataset_ops.Dataset.range(10).shard(7, 5)
self.assertDatasetProduces(dataset, expected_output=[5])
@combinations.generate(test_base.default_test_combinations())
def testIndexEqualsNumShards(self):
dataset = dataset_ops.Dataset.range(10).shard(5, 4)
self.assertDatasetProduces(dataset, expected_output=[4, 9])
@combinations.generate(test_base.default_test_combinations())
def testIndexEqualsNumShards2(self):
dataset = dataset_ops.Dataset.range(10).shard(4, 3)
self.assertDatasetProduces(dataset, expected_output=[3, 7])
@combinations.generate(test_base.default_test_combinations())
def testNumShardsLargerThanDataset(self):
dataset = dataset_ops.Dataset.range(10).shard(20, 5)
self.assertDatasetProduces(dataset, expected_output=[5])
@combinations.generate(test_base.default_test_combinations())
def testName(self):
dataset = dataset_ops.Dataset.from_tensors(42).shard(1, 0, name="shard")
self.assertDatasetProduces(dataset, [42])
class ShardCheckpointTest(checkpoint_test_base.CheckpointTestBase,
parameterized.TestCase):
def _build_dataset(self, num_elements, num_shards, index):
return dataset_ops.Dataset.range(num_elements).shard(num_shards, index)
@combinations.generate(
combinations.times(
test_base.default_test_combinations(),
checkpoint_test_base.default_test_combinations(),
combinations.combine(
elems=[10, 100], num_shards=[2, 5], index=[0, 1])))
def test(self, verify_fn, elems, num_shards, index):
verify_fn(
self,
lambda: self._build_dataset(elems, num_shards, index),
num_outputs=elems // num_shards)
class ShardRandomAccessTest(test_base.DatasetTestBase, parameterized.TestCase):
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
combinations.combine(index=[-1, 2, 3, 4])))
def testInvalidIndex(self, index):
dataset = dataset_ops.Dataset.range(4).shard(num_shards=2, index=0)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, index=index))
@combinations.generate(test_base.default_test_combinations())
def testEmptyDataset(self):
dataset = dataset_ops.Dataset.from_tensor_slices([]).shard(
num_shards=2, index=1)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, index=0))
@combinations.generate(test_base.default_test_combinations())
def testNumShardsAndIndexLessThanNumElements(self):
dataset = dataset_ops.Dataset.range(10).shard(5, 0)
self.assertEqual(0, self.evaluate(random_access.at(dataset, 0)))
self.assertEqual(5, self.evaluate(random_access.at(dataset, 1)))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, 2))
@combinations.generate(test_base.default_test_combinations())
def testNumShardsGreaterThanNumElementsIndexLess(self):
dataset = dataset_ops.Dataset.range(7).shard(8, 3)
self.assertEqual(3, self.evaluate(random_access.at(dataset, 0)))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, 1))
@combinations.generate(test_base.default_test_combinations())
def testNumShardsAndIndexGreaterThanNumElements(self):
dataset = dataset_ops.Dataset.range(13).shard(23, 21)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, 0))
@combinations.generate(
combinations.times(
test_base.default_test_combinations(),
combinations.combine(
elements=[0, 10, 50],
num_shards=[5, 7, 10],
index=[0, 1, 2, 3, 4],
)))
def testMultipleCombinations(self, elements, num_shards, index):
components = range(elements)
dataset = dataset_ops.Dataset.range(elements).shard(
num_shards=num_shards, index=index)
len_dataset = self.evaluate(dataset.cardinality())
for i in range(self.evaluate(dataset.cardinality())):
self.assertAllEqual(components[index + (num_shards * i)],
self.evaluate(random_access.at(dataset, i)))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(random_access.at(dataset, index=len_dataset))
if __name__ == "__main__":
test.main()
| tensorflow/tensorflow | tensorflow/python/data/kernel_tests/shard_test.py | Python | apache-2.0 | 7,742 |
# -*- coding: utf-8 -*-
#########################
# CONFLOG #
#########################
"""
Definitions of logging.
"""
#########################
# IMPORTS #
#########################
import logging
from logging.handlers import RotatingFileHandler
FILENAME_LOG = 'logs/munin.log'
LOGGER_NAME = 'munin'
LOGGER = logging.getLogger(LOGGER_NAME)
#########################
# INIT LOGGING #
#########################
LOGGER.setLevel(logging.DEBUG)
# create formatter
formatter = logging.Formatter(
'%(asctime)s :: %(levelname)s :: %(message)s'
)
# create a handler to file
file_handler = RotatingFileHandler(
FILENAME_LOG, # filename
    'a', 1000000, 1  # append, 1 MB, 1 backup
)
# and define its level and formatter
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
# add handlers to LOGGER
for handler in (file_handler,):
LOGGER.addHandler(handler)
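# Usage sketch (illustrative): other modules can retrieve the configured
# logger through the standard logging registry.
#
#     import logging
#     logger = logging.getLogger('munin')  # same as LOGGER_NAME above
#     logger.info('collector started')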
| Aluriak/munin | munin/config/conflog.py | Python | gpl-2.0 | 934 |
from app.controller.watsonLanguage import WatsonToneAnalyzer
app = WatsonToneAnalyzer()
print(app.doAnalyze(text='I am very happy')) | weizy1981/WatsonRobot | run/runToneAnalyzer.py | Python | apache-2.0 | 132 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('diagnosticos', '0013_auto_20150505_2053'),
]
operations = [
migrations.AlterField(
model_name='diagnosticos',
name='fecha',
field=models.DateField(default=datetime.datetime(2015, 5, 6, 1, 17, 17, 301473), help_text='Formato: dd/mm/yyyy'),
preserve_default=True,
),
migrations.AlterField(
model_name='diagnosticos',
name='hora',
field=models.TimeField(default=datetime.datetime(2015, 5, 6, 1, 17, 17, 301520), help_text='Formato: hh:mm'),
preserve_default=True,
),
]
| btenaglia/hpc-historias-clinicas | hpc-historias-clinicas/diagnosticos/migrations/0014_auto_20150506_0117.py | Python | bsd-3-clause | 802 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Kyoukai documentation build configuration file, created by
# sphinx-quickstart on Fri Jul 22 15:11:32 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import guzzle_sphinx_theme
sys.path.insert(0, os.path.abspath('..'))
import kyoukai
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinxcontrib.asyncio',
'sphinx_autodoc_typehints',
'sphinx.ext.autosummary',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Kyoukai'
copyright = '2016-2017, Laura Dickinson'
author = 'Laura Dickinson'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = kyoukai.__version__
# The full version, including alpha/beta/rc tags.
release = kyoukai.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'manni'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# Autodoc and autosummary
# Autodoc
autosummary_generate = True
autoclass_content = 'both' # include both class docstring and __init__
autodoc_default_flags = [
# Make sure that any autodoc declarations show the right members
'members',
'inherited-members',
'private-members',
'show-inheritance',
]
# make autodoc look less... bad
autodoc_member_order = "bysource"
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'Kyoukaidoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {}
latex_documents = [
(master_doc, 'Kyoukai.tex', 'Kyoukai Documentation',
'Isaac Dickinson', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'kyoukai', 'Kyoukai Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Kyoukai', 'Kyoukai Documentation',
author, 'Kyoukai', 'One line description of project.',
'Miscellaneous'),
]
# Map to the documentation of Python 3's stdlib.
intersphinx_mapping = {'python': ('https://docs.python.org/3/', None),
'mako': ('http://docs.makotemplates.org/en/latest/', None),
'werkzeug': ('http://werkzeug.pocoo.org/docs/0.11/', None)}
| SunDwarf/Kyoukai | docs/conf.py | Python | mit | 5,660 |
#!/usr/bin/env python
#
# Copyright 2014 Institute for Theoretical Information Technology,
# RWTH Aachen University
# www.ti.rwth-aachen.de
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
from ofdm import moms_ff
################################################################################
################################################################################
class moms_block(gr.hier_block2):
def __init__(self,delay_num,delay_denom):
gr.hier_block2.__init__(self,"moms_block",
gr.io_signature(1,1,gr.sizeof_gr_complex),
gr.io_signature(1,1,gr.sizeof_gr_complex))
cmplx_to_real = gr.complex_to_real()
cmplx_to_img = gr.complex_to_imag()
iirf_real = gr.iir_filter_ffd([1.5],[1, -0.5])
self.moms_real = moms_ff()
self.moms_real.set_init_ip_fraction(delay_num,delay_denom)
iirf_imag = gr.iir_filter_ffd([1.5],[1, -0.5])
self.moms_imag = moms_ff()
self.moms_imag.set_init_ip_fraction(delay_num,delay_denom)
float_to_cmplx = gr.float_to_complex()
self.connect((self,0), (cmplx_to_real,0))
self.connect((self,0), (cmplx_to_img,0))
self.connect((cmplx_to_real,0), (iirf_real,0))
self.connect((cmplx_to_img,0), (iirf_imag,0))
self.connect((iirf_real,0), (self.moms_real,0))
self.connect((iirf_imag,0), (self.moms_imag,0))
self.connect((self.moms_real,0), (float_to_cmplx,0))
self.connect((self.moms_imag,0), (float_to_cmplx,1))
self.connect((float_to_cmplx,0), (self,0))
def set_ip_fraction(self,a,b):
self.moms_real.set_ip_fraction(a,b)
self.moms_imag.set_ip_fraction(a,b)
def set_offset_num(self,a):
self.moms_real.set_offset_num(a)
self.moms_imag.set_offset_num(a)
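# Instantiation sketch (illustrative; assumes an enclosing gr.top_block
# named tb with compatible src/snk blocks):
#
#     delay = moms_block(1, 4)          # delay by 1/4 of a sample
#     tb.connect(src, delay, snk)
#     delay.set_ip_fraction(1, 2)       # retune the fraction at runtime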
| rwth-ti/gr-ofdm | python/ofdm/moms_general.py | Python | gpl-3.0 | 2,451 |
# -*- coding: utf-8 -*-
# Copyright 2014 Nicolas Bessi, Alexandre Fayolle, Camptocamp SA
# Copyright 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
def post_init_hook(cr, registry):
""" Add street3 to address format """
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street2)s\n',
E'%(street2)s\n%(street3)s\n'
)
"""
cr.execute(query)
def uninstall_hook(cr, registry):
""" Remove street3 from address format """
# Remove %(street3)s\n from address_format
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s\n',
''
)
"""
cr.execute(query)
# Remove %(street3)s from address_format
query = """
UPDATE res_country
SET address_format = replace(
address_format,
E'%(street3)s',
''
)
"""
cr.execute(query)
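# Illustrative effect on a typical address_format value (fragment shown
# only as an example):
#
#   before install: "%(street)s\n%(street2)s\n%(city)s ..."
#   after install:  "%(street)s\n%(street2)s\n%(street3)s\n%(city)s ..."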
| be-cloud-be/horizon-addons | partner-contact/partner_address_street3/hooks.py | Python | agpl-3.0 | 1,003 |
### Edge Bounce Behaviour ###
from Behaviour import Behaviour
from lwmath.Vector import Vector
class EdgeBounce(Behaviour):
def __init__(self):
self.min = Vector()
self.max = Vector()
        super(EdgeBounce, self).__init__()
def apply(self, p, dt, index):
if p.pos.x - p.radius < self.min._x:
p.pos.x = self.min._x + p.radius
elif p.pos.x + p.radius > self.max._x:
p.pos.x = self.max._x - p.radius
if p.pos.y - p.radius < self.min._y:
p.pos.y = self.min._y + p.radius
elif p.pos.y + p.radius > self.max._y:
p.pos.y = self.max._y - p.radius
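# Usage sketch (illustrative; assumes Vector accepts x/y components and
# particles expose the pos and radius attributes used by apply()):
#
#     bounce = EdgeBounce()
#     bounce.min = Vector(0, 0)
#     bounce.max = Vector(640, 480)
#     bounce.apply(particle, 1.0 / 60, 0)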
| gregroper/Pycipia | behaviour/EdgeBounce.py | Python | mit | 651 |
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
import socket
import sys
import array
import binascii
from . import packet_base
from . import packet_utils
from ryu.lib.mac import haddr_to_bin, haddr_to_str
ICMPV6_DST_UNREACH = 1 # dest unreachable, codes:
ICMPV6_PACKET_TOO_BIG = 2 # packet too big
ICMPV6_TIME_EXCEEDED = 3 # time exceeded, code:
ICMPV6_PARAM_PROB = 4 # ip6 header bad
ICMPV6_ECHO_REQUEST = 128 # echo service
ICMPV6_ECHO_REPLY = 129 # echo reply
MLD_LISTENER_QUERY = 130 # multicast listener query
MLD_LISTENER_REPOR = 131 # multicast listener report
MLD_LISTENER_DONE = 132 # multicast listener done
# RFC2292 decls
ICMPV6_MEMBERSHIP_QUERY = 130 # group membership query
ICMPV6_MEMBERSHIP_REPORT = 131 # group membership report
ICMPV6_MEMBERSHIP_REDUCTION = 132 # group membership termination
ND_ROUTER_SOLICIT = 133 # router solicitation
ND_ROUTER_ADVERT = 134 # router advertisement
ND_NEIGHBOR_SOLICIT = 135 # neighbor solicitation
ND_NEIGHBOR_ADVERT = 136 # neighbor advertisement
ND_REDIREC = 137 # redirect
ICMPV6_ROUTER_RENUMBERING = 138 # router renumbering
ICMPV6_WRUREQUEST = 139 # who are you request
ICMPV6_WRUREPLY = 140 # who are you reply
ICMPV6_FQDN_QUERY = 139 # FQDN query
ICMPV6_FQDN_REPLY = 140 # FQDN reply
ICMPV6_NI_QUERY = 139 # node information request
ICMPV6_NI_REPLY = 140 # node information reply
ICMPV6_MAXTYPE = 201
class icmpv6(packet_base.PacketBase):
_PACK_STR = '!BBH'
_MIN_LEN = struct.calcsize(_PACK_STR)
_ICMPV6_TYPES = {}
@staticmethod
def register_icmpv6_type(*args):
def _register_icmpv6_type(cls):
for type_ in args:
icmpv6._ICMPV6_TYPES[type_] = cls
return cls
return _register_icmpv6_type
def __init__(self, type_, code, csum, data=None):
super(icmpv6, self).__init__()
self.type_ = type_
self.code = code
self.csum = csum
self.data = data
@classmethod
def parser(cls, buf):
(type_, code, csum) = struct.unpack_from(cls._PACK_STR, buf)
msg = cls(type_, code, csum)
offset = cls._MIN_LEN
if len(buf) > offset:
cls_ = cls._ICMPV6_TYPES.get(type_, None)
if cls_:
msg.data = cls_.parser(buf, offset)
else:
msg.data = buf[offset:]
return msg, None
def serialize(self, payload, prev):
hdr = bytearray(struct.pack(icmpv6._PACK_STR, self.type_,
self.code, self.csum))
if self.data is not None:
if self.type_ in icmpv6._ICMPV6_TYPES:
hdr += self.data.serialize()
else:
hdr += self.data
src = prev.src
dst = prev.dst
nxt = prev.nxt
if self.csum == 0:
length = len(str(hdr))
ph = struct.pack('!16s16sBBH', prev.src, prev.dst, 0, prev.nxt,
length)
f = ph + hdr + payload
if len(f) % 2:
f += '\x00'
self.csum = socket.htons(packet_utils.checksum(f))
struct.pack_into('!H', hdr, 2, self.csum)
return hdr
@icmpv6.register_icmpv6_type(ND_NEIGHBOR_SOLICIT, ND_NEIGHBOR_ADVERT)
class nd_neighbor(object):
_PACK_STR = '!I16s'
_MIN_LEN = struct.calcsize(_PACK_STR)
_ND_OPTION_TYPES = {}
# ND option type
ND_OPTION_SLA = 1 # Source Link-Layer Address
ND_OPTION_TLA = 2 # Target Link-Layer Address
ND_OPTION_PI = 3 # Prefix Information
ND_OPTION_RH = 4 # Redirected Header
ND_OPTION_MTU = 5 # MTU
@staticmethod
def register_nd_option_type(*args):
def _register_nd_option_type(cls):
for type_ in args:
nd_neighbor._ND_OPTION_TYPES[type_] = cls
return cls
return _register_nd_option_type
def __init__(self, res, dst, type_=None, length=None, data=None):
self.res = res << 29
self.dst = dst
self.type_ = type_
self.length = length
self.data = data
@classmethod
def parser(cls, buf, offset):
(res, dst) = struct.unpack_from(cls._PACK_STR, buf, offset)
msg = cls(res >> 29, dst)
offset += cls._MIN_LEN
if len(buf) > offset:
(msg.type_, msg.length) = struct.unpack_from('!BB', buf, offset)
cls_ = cls._ND_OPTION_TYPES.get(msg.type_, None)
offset += 2
if cls_:
msg.data = cls_.parser(buf, offset)
else:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(nd_neighbor._PACK_STR, self.res, self.dst))
if self.type_ is not None:
hdr += bytearray(struct.pack('!BB', self.type_, self.length))
if self.type_ in nd_neighbor._ND_OPTION_TYPES:
hdr += self.data.serialize()
elif self.data is not None:
hdr += bytearray(self.data)
return hdr
@nd_neighbor.register_nd_option_type(nd_neighbor.ND_OPTION_SLA,
nd_neighbor.ND_OPTION_TLA)
class nd_option_la(object):
_PACK_STR = '!6s'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, hw_src, data=None):
self.hw_src = hw_src
self.data = data
@classmethod
def parser(cls, buf, offset):
(hw_src, ) = struct.unpack_from(cls._PACK_STR, buf, offset)
msg = cls(hw_src)
offset += cls._MIN_LEN
if len(buf) > offset:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(self._PACK_STR, self.hw_src))
if self.data is not None:
hdr += bytearray(self.data)
return hdr
@icmpv6.register_icmpv6_type(ICMPV6_ECHO_REPLY, ICMPV6_ECHO_REQUEST)
class echo(object):
_PACK_STR = '!HH'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, id_, seq, data=None):
self.id = id_
self.seq = seq
self.data = data
@classmethod
def parser(cls, buf, offset):
(id_, seq) = struct.unpack_from(cls._PACK_STR, buf, offset)
msg = cls(id_, seq)
offset += cls._MIN_LEN
if len(buf) > offset:
msg.data = buf[offset:]
return msg
def serialize(self):
hdr = bytearray(struct.pack(echo._PACK_STR, self.id,
self.seq))
if self.data is not None:
hdr += bytearray(self.data)
return hdr
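# Construction sketch (illustrative): an ICMPv6 echo request whose
# checksum is left as 0 so that serialize() computes it from the IPv6
# pseudo header.
#
#     pkt = icmpv6(ICMPV6_ECHO_REQUEST, 0, 0, echo(0x1234, 1, 'ping'))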
| 09zwcbupt/ryu | ryu/lib/packet/icmpv6.py | Python | apache-2.0 | 7,229 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp import netsvc
from openerp.netsvc import Service
from num2words import num2words
import datetime
try:
del Service._services['report.conditional.deed.of.sale']
except:
pass
from openerp.report import report_sxw
class cdos(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context=None):
super(cdos, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'num2words':num2words,
'ordinal':self._get_ordinal,
'check_lines':self._get_check_lines,
# 'product_lines':self._get_product_lines,
'get_cdos_month':self._get_cdos_month,
})
def _get_cdos_month(self,cdos_obj,context=None):
target_date=cdos_obj.cdos_date
temp=datetime.datetime.strptime(target_date,'%Y-%m-%d')
return temp.strftime('%B')
    def _get_product_lines(self,invoice_id,context=None):
#fetch connected sale order
self.cr.execute('select order_id from sale_order_invoice_rel where invoice_id = %s' % invoice_id)
target_sale_order_ids=[x[0] for x in self.cr.fetchall()]
#fetch connected delivery order move lines
self.cr.execute('select move.id from stock_picking_out do inner join stock_move move on (move.picking_id = do.id) where do.sale_id in (%s)' % str(target_sale_order_ids)[1:-1])
target_stock_move_ids=[x[0] for x in self.cr.fetchall()]
res = self.pool.get('stock.move').browse(self.cr,self.uid,target_stock_move_ids)
return res
def _get_check_lines(self,invoice_id,context=None):
res = []
return res
def _get_ordinal(self,n,context=None):
        if 10 < n % 100 < 14: return u'%sth' % n
if n % 10 == 1: return u'%sst' % n
if n % 10 == 2: return u'%snd' % n
if n % 10 == 3: return u'%srd' % n
return u'%sth' % n
report_sxw.report_sxw('report.conditional.deed.of.sale', 'glimsol.conditional.deed.of.sale', 'addons/glimsol_check/report/conditional_deed_of_sale.rml', parser=cdos)
| romeoabulencia/glimsol_check | report/cdos.py | Python | gpl-2.0 | 3,179 |
"""
Copyright 2018 Attila Szollosi
This file is part of pmbootstrap.
pmbootstrap is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pmbootstrap is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pmbootstrap. If not, see <http://www.gnu.org/licenses/>.
"""
import logging
import pmb.chroot
import pmb.flasher
import pmb.helpers.frontend
def create_zip(args, suffix):
"""
Create android recovery compatible installer zip.
"""
zip_root = "/var/lib/postmarketos-android-recovery-installer/"
rootfs = "/mnt/rootfs_" + args.device
flavor = pmb.helpers.frontend._parse_flavor(args)
method = args.deviceinfo["flash_method"]
vars = pmb.flasher.variables(args, flavor, method)
# Install recovery installer package in buildroot
pmb.chroot.apk.install(args,
["postmarketos-android-recovery-installer"],
suffix)
logging.info("(" + suffix + ") create recovery zip")
# Create config file for the recovery installer
options = {
"DEVICE": args.device,
"FLAVOR": flavor,
"FLASH_KERNEL": args.recovery_flash_kernel,
"ISOREC": method == "heimdall-isorec",
"KERNEL_PARTLABEL": vars["$PARTITION_KERNEL"],
"INITFS_PARTLABEL": vars["$PARTITION_INITFS"],
"SYSTEM_PARTLABEL": vars["$PARTITION_SYSTEM"],
"INSTALL_PARTITION": args.recovery_install_partition,
"CIPHER": args.cipher,
"FDE": args.full_disk_encryption,
}
# Write to a temporary file
config_temp = args.work + "/chroot_" + suffix + "/tmp/install_options"
with open(config_temp, "w") as handle:
for key, value in options.items():
if isinstance(value, bool):
value = str(value).lower()
handle.write(key + "='" + value + "'\n")
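    # The resulting install_options file looks roughly like this (the
    # device and flavor values are illustrative):
    #
    #   DEVICE='samsung-i9100'
    #   FLAVOR='postmarketos-stable'
    #   ISOREC='false'
    #   FDE='true'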
commands = [
# Move config file from /tmp/ to zip root
["mv", "/tmp/install_options", "chroot/install_options"],
# Create tar archive of the rootfs
["tar", "-pcf", "rootfs.tar", "--exclude", "./home", "-C", rootfs,
"."],
# Append packages keys
["tar", "-prf", "rootfs.tar", "-C", "/", "./etc/apk/keys"],
# Compress with -1 for speed improvement
["gzip", "-f1", "rootfs.tar"],
["build-recovery-zip", args.device]]
for command in commands:
pmb.chroot.root(args, command, suffix, working_dir=zip_root)
| postmarketOS/pmbootstrap | pmb/install/recovery.py | Python | gpl-3.0 | 2,841 |
"""This module defines the Form class, a subclass of Views that can manage
widgets."""
import spyral
import operator
import inspect
class _FormFieldMeta(type):
"""
Black magic for wrapping widgets defined as class attributes. See python
documentation on overriding Python
`__metaclass__ <http://docs.python.org/2/reference/datamodel.html#customizing-class-creation>`_
for more information.
"""
    def __new__(meta, name, bases, dct):
        cls = type.__new__(meta, name, bases, dct)
is_wrapper = lambda obj: isinstance(obj, spyral.widgets._WidgetWrapper)
cls.fields = sorted(inspect.getmembers(cls, is_wrapper),
key=lambda i: i[1].creation_counter)
return cls
class Form(spyral.View):
"""
Forms are a subclass of :class:`Views <spyral.View>` that hold a set of
:ref:`Widgets <api.widgets>`. Forms will manage focus and event delegation between the widgets,
ensuring that only one widget is active at a given time. Forms are defined
using a special class-based syntax::
class MyForm(spyral.Form):
name = spyral.widgets.TextInput(100, "Current Name")
remember_me = spyral.widgets.Checkbox()
save = spyral.widgets.ToggleButton("Save")
my_form = MyForm()
When referencing widgets in this way, the "Widget" part of the widget's name
is dropped: ``spyral.widgets.ButtonWidget`` becomes ``spyral.widgets.Button``.
Every widget in a form is accessible as an attribute of the form:
>>> print my_form.remember_me.value
"up"
:param scene: The Scene or View that this Form belongs to.
:type scene: :class:`Scene <spyral.Scene>` or :class:`View <spyral.View>`.
"""
__metaclass__ = _FormFieldMeta
def __init__(self, scene):
spyral.View.__init__(self, scene)
class Fields(object):
pass
# Maintain a list of all the widget instances
self._widgets = []
# Map each widget instance to its tab order
self._tab_orders = {}
# The instance of the currently focused widget
self._current_focus = None
# The instance of the currently mouse-overed widget
self._mouse_currently_over = None
# The instance of the currently mouse-downed widget
self._mouse_down_on = None
spyral.event.register("input.mouse.up.left", self._handle_mouse_up,
scene=scene)
spyral.event.register("input.mouse.down.left", self._handle_mouse_down,
scene=scene)
spyral.event.register("input.mouse.motion", self._handle_mouse_motion,
scene=scene)
spyral.event.register("input.keyboard.down.tab", self._handle_tab,
scene=scene)
spyral.event.register("input.keyboard.up.tab", self._handle_tab,
scene=scene)
spyral.event.register("input.keyboard.up", self._handle_key_up,
scene=scene)
spyral.event.register("input.keyboard.down", self._handle_key_down,
scene=scene)
fields = self.fields
self.fields = Fields()
for name, widget in fields:
w = widget(self, name)
setattr(w, "name", name)
setattr(self, name, w)
self.add_widget(name, w)
self.focus()
def _handle_mouse_up(self, event):
"""
Delegate the mouse being released to the widget that is currently being
clicked.
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
if self._mouse_down_on is None:
return False
self._mouse_down_on._handle_mouse_up(event)
self._mouse_down_on = None
def _handle_mouse_down(self, event):
"""
Delegate the mouse being clicked down to any widget that it is currently
hovering over.
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
for widget in self._widgets:
if widget.collide_point(event.pos):
self.focus(widget)
self._mouse_down_on = widget
widget._handle_mouse_down(event)
return True
return False
def _handle_mouse_motion(self, event):
"""
Delegate the mouse being hovered over any widget that it is currently
hovering over. If the widget being hovered over is no longer the
previous widget that was being hovered over, it notifies the old widget
(mouse out event) and the new widget (mouse over event).
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
if self._mouse_down_on is not None:
self._mouse_down_on._handle_mouse_motion(event)
now_hover = None
for widget in self._widgets:
if widget.collide_point(event.pos):
widget._handle_mouse_motion(event)
now_hover = widget
if now_hover != self._mouse_currently_over:
if self._mouse_currently_over is not None:
self._mouse_currently_over._handle_mouse_out(event)
self._mouse_currently_over = now_hover
if now_hover is not None:
now_hover._handle_mouse_over(event)
def _handle_tab(self, event):
"""
        If this form has focus, advances to the next widget in the tab
        order, unless the shift key is held, in which case it moves to
        the previous widget.
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
if self._current_focus is None:
return
if event.type == 'down':
return True
if event.mod & spyral.mods.shift:
self.previous()
return True
self.next()
return True
def _handle_key_down(self, event):
"""
Notifies the currently focused widget that a key has been pressed.
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
if self._current_focus is not None:
self._current_focus._handle_key_down(event)
def _handle_key_up(self, event):
"""
Notifies the currently focused widget that a key has been released.
:param event: The associated event data.
:type event: :class:`Event <spyral.Event>`
"""
if self._current_focus is not None:
self._current_focus._handle_key_up(event)
def add_widget(self, name, widget, tab_order=None):
"""
        Adds a new widget to this form. When this method is used, you create
        the Widget yourself, as you would create a normal Sprite. The
        class-based approach is preferred over this method; consider
        carefully whether you can achieve the dynamism you need through
        visibility and disabling instead.
>>> my_widget = spyral.widgets.ButtonWidget(my_form, "save")
>>> my_form.add_widget("save", my_widget)
:param str name: A unique name for this widget.
:param widget: The new Widget.
:type widget: :ref:`Widget <api.widgets>`
        :param int tab_order: Sets the tab order for this widget explicitly. If
                              tab_order is None, it is set to one higher than
                              the highest tab order.
"""
if tab_order is None:
if len(self._tab_orders) > 0:
tab_order = max(self._tab_orders.itervalues())+1
else:
tab_order = 0
self._tab_orders[widget] = tab_order
self._widgets.append(widget)
#self.add_child(widget)
setattr(self.fields, name, widget)
def _get_values(self):
"""
A dictionary of the values for all the fields, mapping the name
of each widget with the value associated with that widget. Read-only.
"""
return dict((widget.name, widget.value) for widget in self._widgets)
values = property(_get_values)
def _blur(self, widget):
"""
Queues an event indicating that a widget has lost focus.
:param widget: The widget that is losing focus.
:type widget: :ref:`Widget <api.widgets>`
"""
e = spyral.Event(name="blurred", widget=widget, form=self)
self.scene._queue_event("form.%(form_name)s.%(widget)s.blurred" %
{"form_name": self.__class__.__name__,
"widget": widget.name},
e)
widget._handle_blur(e)
def focus(self, widget=None):
"""
Sets the focus to be on a specific widget. Focus by default goes
to the first widget added to the form.
:param widget: The widget that is gaining focus; if None, then the first
widget gains focus.
:type widget: :ref:`Widget <api.widgets>`
"""
# By default, we focus on the first widget added to the form
if widget is None:
if not self._widgets:
return
widget = min(self._tab_orders.iteritems(),
key=operator.itemgetter(1))[0]
# If we'd focused on something before, we blur it
if self._current_focus is not None:
self._blur(self._current_focus)
# We keep track of our newly focused thing
self._current_focus = widget
# Make and send the "focused" event
e = spyral.Event(name="focused", widget=widget, form=self)
self.scene._queue_event("form.%(form_name)s.%(widget)s.focused" %
{"form_name": self.__class__.__name__,
"widget": widget.name},
e)
widget._handle_focus(e)
return
def blur(self):
"""
Defocuses the entire form.
"""
if self._current_focus is not None:
self._blur(self._current_focus)
self._current_focus = None
def next(self, wrap=True):
"""
Focuses on the next widget in tab order.
:param bool wrap: Whether to continue to the first widget when the end
of the tab order is reached.
"""
if self._current_focus is None:
self.focus()
return
if not self._widgets:
return
cur = self._tab_orders[self._current_focus]
candidates = [(widget, order) for (widget, order)
in self._tab_orders.iteritems()
if order > cur]
if len(candidates) == 0:
if not wrap:
return
widget = None
else:
widget = min(candidates, key=operator.itemgetter(1))[0]
self._blur(self._current_focus)
self._current_focus = None
self.focus(widget)
def previous(self, wrap=True):
"""
Focuses the previous widget in tab order.
:param bool wrap: Whether to continue to the last widget when the first
of the tab order is reached.
"""
if self._current_focus is None:
self.focus()
return
if not self._widgets:
return
cur = self._tab_orders[self._current_focus]
candidates = [(widget, order) for (widget, order)
in self._tab_orders.iteritems()
if order < cur]
if len(candidates) == 0:
if not wrap:
return
widget = max(self._tab_orders.iteritems(),
key=operator.itemgetter(1))[0]
else:
widget = max(candidates, key=operator.itemgetter(1))[0]
self._blur(self._current_focus)
self._current_focus = None
self.focus(widget)
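# --- Illustrative usage sketch (not part of spyral itself) ---
# A minimal example of the class-based Form API documented above. The scene
# object and the Button constructor arguments are assumptions, not confirmed
# spyral signatures.
#
#   class LoginForm(spyral.Form):
#       remember_me = spyral.widgets.Button("Remember me?")
#
#   form = LoginForm(my_scene)   # my_scene: an existing spyral.Scene
#   form.focus()                 # focuses the first widget in tab order
#   print form.values            # {'remember_me': ...}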
| platipy/spyral | spyral/form.py | Python | lgpl-2.1 | 12,227 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-25 01:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workout', '0006_auto_20161024_2135'),
]
operations = [
migrations.AddField(
model_name='exercise',
name='slug',
field=models.SlugField(default='unknown', max_length=140, unique=True),
preserve_default=False,
),
]
| audiolion/py-fitness | py_fitness/py_fitness/workout/migrations/0007_exercise_slug.py | Python | mit | 518 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Will Thames <@willthames>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: k8s_info
short_description: Describe Kubernetes (K8s) objects
version_added: "2.7"
author:
- "Will Thames (@willthames)"
description:
- Use the OpenShift Python client to perform read operations on K8s objects.
- Access to the full range of K8s APIs.
- Authenticate using either a config file, certificates, password or token.
- Supports check mode.
- This module was called C(k8s_facts) before Ansible 2.9. The usage did not change.
options:
api_version:
description:
    - Use to specify the API version. Use in conjunction with I(kind), I(name), and I(namespace) to identify a
specific object.
default: v1
aliases:
- api
- version
kind:
description:
- Use to specify an object model. Use in conjunction with I(api_version), I(name), and I(namespace) to identify a
specific object.
required: yes
name:
description:
- Use to specify an object name. Use in conjunction with I(api_version), I(kind) and I(namespace) to identify a
specific object.
namespace:
description:
- Use to specify an object namespace. Use in conjunction with I(api_version), I(kind), and I(name)
to identify a specific object.
label_selectors:
description: List of label selectors to use to filter results
field_selectors:
description: List of field selectors to use to filter results
extends_documentation_fragment:
- k8s_auth_options
requirements:
- "python >= 2.7"
- "openshift >= 0.6"
- "PyYAML >= 3.11"
'''
EXAMPLES = '''
- name: Get an existing Service object
k8s_info:
api_version: v1
kind: Service
name: web
namespace: testing
register: web_service
- name: Get a list of all service objects
k8s_info:
api_version: v1
kind: Service
namespace: testing
register: service_list
- name: Get a list of all pods from any namespace
k8s_info:
kind: Pod
register: pod_list
- name: Search for all Pods labelled app=web
k8s_info:
kind: Pod
label_selectors:
- app = web
- tier in (dev, test)
- name: Search for all running pods
k8s_info:
kind: Pod
field_selectors:
- status.phase=Running
'''
RETURN = '''
resources:
description:
- The object(s) that exists
returned: success
type: complex
contains:
api_version:
description: The versioned schema of this representation of an object.
returned: success
type: str
kind:
description: Represents the REST resource this object represents.
returned: success
type: str
metadata:
description: Standard object metadata. Includes name, namespace, annotations, labels, etc.
returned: success
type: dict
spec:
description: Specific attributes of the object. Will vary based on the I(api_version) and I(kind).
returned: success
type: dict
status:
description: Current status details for the object.
returned: success
type: dict
'''
from ansible.module_utils.k8s.common import KubernetesAnsibleModule, AUTH_ARG_SPEC
import copy
class KubernetesInfoModule(KubernetesAnsibleModule):
def __init__(self, *args, **kwargs):
KubernetesAnsibleModule.__init__(self, *args,
supports_check_mode=True,
**kwargs)
if self._name == 'k8s_facts':
self.deprecate("The 'k8s_facts' module has been renamed to 'k8s_info'",
version='2.13', collection_name='ansible.builtin')
def execute_module(self):
self.client = self.get_api_client()
self.exit_json(changed=False,
**self.kubernetes_facts(self.params['kind'],
self.params['api_version'],
self.params['name'],
self.params['namespace'],
self.params['label_selectors'],
self.params['field_selectors']))
@property
def argspec(self):
args = copy.deepcopy(AUTH_ARG_SPEC)
args.update(
dict(
kind=dict(required=True),
api_version=dict(default='v1', aliases=['api', 'version']),
name=dict(),
namespace=dict(),
label_selectors=dict(type='list', default=[]),
field_selectors=dict(type='list', default=[]),
)
)
return args
def main():
KubernetesInfoModule().execute_module()
if __name__ == '__main__':
main()
| azaghal/ansible | test/support/integration/plugins/modules/k8s_info.py | Python | gpl-3.0 | 5,086 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import fluent_contents.plugins.oembeditem.fields
class Migration(migrations.Migration):
dependencies = [
('gk_collections_moving_image', '0002_auto_20161026_1312'),
]
operations = [
migrations.AddField(
model_name='movingimagework',
name='trailer',
field=fluent_contents.plugins.oembeditem.fields.OEmbedUrlField(help_text='Enter the URL of the online content to embed (e.g. a YouTube or Vimeo video, SlideShare presentation, etc..)', blank=True),
),
]
| ic-labs/glamkit-collections | glamkit_collections/contrib/work_creator/plugins/moving_image/migrations/0003_movingimagework_trailer.py | Python | mit | 638 |
# -*- coding: utf-8 -*-
from .dropboxAdapter import DropboxAdapter
from .yesDocAdapter import YesDocAdapter
from .driveAdapter import DriveAdapter
# Imported last, since it makes use of the rest of the adapters.
from .fileManagerFactory import FileManagerFactory
| lightning-round/salud-api | app/mod_profiles/adapters/__init__.py | Python | gpl-2.0 | 264 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for data_provider."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
import data_provider
class DataProviderTest(tf.test.TestCase):
def test_cifar10_train_set(self):
dataset_dir = os.path.join(
tf.flags.FLAGS.test_srcdir,
'google3/third_party/tensorflow_models/gan/cifar/testdata')
batch_size = 4
images, labels, num_samples, num_classes = data_provider.provide_data(
batch_size, dataset_dir)
self.assertEqual(50000, num_samples)
self.assertEqual(10, num_classes)
with self.test_session(use_gpu=True) as sess:
with tf.contrib.slim.queues.QueueRunners(sess):
images_out, labels_out = sess.run([images, labels])
self.assertEqual(images_out.shape, (batch_size, 32, 32, 3))
expected_label_shape = (batch_size, 10)
self.assertEqual(expected_label_shape, labels_out.shape)
# Check range.
self.assertTrue(np.all(np.abs(images_out) <= 1))
if __name__ == '__main__':
tf.test.main()
| jiaphuan/models | research/gan/cifar/data_provider_test.py | Python | apache-2.0 | 1,803 |
import copy
import unittest
try:
from unittest import mock
except ImportError:
# Python 3.2 does not have mock in the standard library
import mock
import yaml
from sauna import Sauna, _merge_config
class ConfigTest(unittest.TestCase):
def test_dict_conf(self):
dict_conf = {
"plugins": {
"Disk": {
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
}
}
}
expected_result = [
{
'type': 'Disk',
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
}
]
sauna = Sauna(config=dict_conf)
self.assertEqual(sauna.plugins_checks, expected_result)
def test_list_conf(self):
list_conf = {
"plugins": [
{
'type': 'Disk',
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
}
]
}
sauna = Sauna(config=list_conf)
self.assertEqual(sauna.plugins_checks, list_conf['plugins'])
def test_complex_dict_conf(self):
dict_conf = {
"plugins": {
"Disk": {
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
},
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
},
"Memory": {
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
},
]
}
}
}
expected_result = [
{
'type': 'Disk',
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
},
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
},
{
"type": "Memory",
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
},
]
}
]
sauna = Sauna(config=dict_conf)
self.assertEqual(len(sauna.plugins_checks), len(expected_result))
for elem in sauna.plugins_checks:
self.assertIn(elem, expected_result, 'missing element')
def test_consumers_dict_conf(self):
dict_conf = {
'consumers': {
'NSCA': {
'foo': 'bar'
},
'Stdout': None
}
}
expected_result = [
{
'type': 'NSCA',
'foo': 'bar'
},
{
'type': 'Stdout',
}
]
sauna = Sauna(config=dict_conf)
for r in expected_result:
self.assert_(r in sauna.consumers)
def test_consumers_list_conf(self):
list_conf = {
'consumers': [
{
'type': 'NSCA',
'foo': 'bar'
},
{
'type': 'Stdout',
}
]
}
sauna = Sauna(config=list_conf)
for r in list_conf['consumers']:
self.assert_(r in sauna.consumers)
def test_merge_config(self):
original = {
'periodicity': 60,
'consumers': {
'Stdout': {}
},
'plugins': [
{
'type': 'Disk',
"config": {
"myconf": "myvalue"
},
"checks": [
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
},
{
"type": "used_percent",
"warn": "80%",
"crit": "90%"
}
]
}
]
}
        # Not changing anything
expected = copy.deepcopy(original)
_merge_config(original, {})
self.assertDictEqual(original, expected)
# Adding a consumer
expected['consumers']['NSCA'] = {}
_merge_config(original, {'consumers': {'NSCA': {}}})
self.assertDictEqual(original, expected)
# Adding a plugin
expected['plugins'].append({'type': 'Load'})
_merge_config(original, {'plugins': [{'type': 'Load'}]})
self.assertDictEqual(original, expected)
# Adding a root property
expected['hostname'] = 'host-1.domain.tld'
_merge_config(original, {'hostname': 'host-1.domain.tld'})
self.assertDictEqual(original, expected)
# Appending to a non existent list
expected['extra_plugins'] = ['/opt/plugins1', '/opt/plugins2']
_merge_config(original,
{'extra_plugins': ['/opt/plugins1', '/opt/plugins2']})
self.assertDictEqual(original, expected)
def test_assemble_config_sample(self):
mock_open = mock.mock_open()
sauna_instance = Sauna()
with mock.patch('builtins.open', mock_open):
sauna_instance.assemble_config_sample('/foo')
mock_open.assert_called_once_with('/foo/sauna-sample.yml', 'w')
f = mock_open()
generated_yaml_string = f.write.call_args[0][0]
# Will raise a yaml error if generated content is not valid yaml
yaml.safe_load(generated_yaml_string)
def test_conf_with_concurrency_instantiates_threadpool(self):
original = {
'periodicity': 60,
'concurrency': 5,
'consumers': {
'Stdout': {}
},
'plugins': []
}
sauna = Sauna(config=original)
self.assertIsNotNone(sauna._thread_pool)
def test_conf_without_concurrency_no_threadpool(self):
original = {
'periodicity': 60,
'consumers': {
'Stdout': {},
},
'plugins': []
}
sauna = Sauna(config=original)
self.assertIsNone(sauna._thread_pool)
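# Illustrative minimal configuration, distilled from the test cases above
# (this snippet is not part of the test suite):
#
#   sauna = Sauna(config={
#       'periodicity': 60,
#       'consumers': {'Stdout': {}},
#       'plugins': [{
#           'type': 'Disk',
#           'checks': [{'type': 'used_percent', 'warn': '80%', 'crit': '90%'}],
#       }],
#   })
#   print(sauna.plugins_checks)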
| bewiwi/sauna | tests/test_config.py | Python | bsd-2-clause | 8,010 |
import helper
if __name__ == "__main__":
helper.greeting("go hoos")
| tsteining/cs3240-labdemo | steining.py | Python | mit | 70 |
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for Drydock database tables."""
from sqlalchemy.schema import Table, Column
from sqlalchemy.types import Boolean, DateTime, String, Integer, Text
from sqlalchemy.dialects import postgresql as pg
class ExtendTable(Table):
def __new__(cls, metadata):
self = super().__new__(cls, cls.__tablename__, metadata,
*cls.__schema__)
return self
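# Illustrative note (not part of the original module): each subclass below
# only declares __tablename__ and __schema__, so instantiating it with a
# SQLAlchemy MetaData object is enough to register the table, e.g.:
#
#   from sqlalchemy import MetaData
#   metadata = MetaData()
#   tasks = Tasks(metadata)   # equivalent to Table('tasks', metadata, ...)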
class Tasks(ExtendTable):
"""Table for persisting Tasks."""
__tablename__ = 'tasks'
__schema__ = [
Column('task_id', pg.BYTEA(16), primary_key=True),
Column('parent_task_id', pg.BYTEA(16)),
Column('subtask_id_list', pg.ARRAY(pg.BYTEA(16))),
Column('result_status', String(32)),
Column('result_message', String(128)),
Column('result_reason', String(128)),
Column('result_error_count', Integer),
Column('result_successes', pg.ARRAY(String(32))),
Column('result_failures', pg.ARRAY(String(32))),
Column('retry', Integer),
Column('status', String(32)),
Column('created', DateTime),
Column('created_by', String(16)),
Column('updated', DateTime),
Column('design_ref', String(128)),
Column('request_context', pg.JSON),
Column('node_filter', pg.JSON),
Column('action', String(32)),
Column('terminated', DateTime),
Column('terminated_by', String(16)),
Column('terminate', Boolean, default=False)
]
class ResultMessage(ExtendTable):
"""Table for tracking result/status messages."""
__tablename__ = 'result_message'
__schema__ = [
Column('sequence', Integer, primary_key=True),
Column('task_id', pg.BYTEA(16)),
Column('message', String(1024)),
Column('error', Boolean),
Column('context', String(64)),
Column('context_type', String(16)),
Column('ts', DateTime),
Column('extra', pg.JSON)
]
class ActiveInstance(ExtendTable):
"""Table to organize multiple orchestrator instances."""
__tablename__ = 'active_instance'
__schema__ = [
Column('dummy_key', Integer, primary_key=True),
Column('identity', pg.BYTEA(16)),
Column('last_ping', DateTime),
]
class BootAction(ExtendTable):
"""Table persisting node build data."""
__tablename__ = 'boot_action'
__schema__ = [
Column('node_name', String(280), primary_key=True),
Column('task_id', pg.BYTEA(16)),
Column('identity_key', pg.BYTEA(32)),
]
class BootActionStatus(ExtendTable):
"""Table tracking status of node boot actions."""
__tablename__ = 'boot_action_status'
__schema__ = [
Column('node_name', String(280), index=True),
Column('action_id', pg.BYTEA(16), primary_key=True),
Column('action_name', String(64)),
Column('task_id', pg.BYTEA(16), index=True),
Column('identity_key', pg.BYTEA(32)),
Column('action_status', String(32)),
]
class BuildData(ExtendTable):
"""Table for persisting node build data."""
__tablename__ = 'build_data'
__schema__ = [
Column('node_name', String(32), index=True),
Column('task_id', pg.BYTEA(16), index=True),
Column('collected_date', DateTime),
Column('generator', String(256)),
Column('data_format', String(32)),
Column('data_element', Text),
]
| att-comdev/drydock | drydock_provisioner/statemgmt/db/tables.py | Python | apache-2.0 | 4,033 |
import numpy
import numpy.linalg as la
import clq
import clq.backends.opencl.pyopencl as cl
from clq.backends.opencl import get_global_id
a = numpy.random.rand(50000).astype(numpy.float32)
b = numpy.random.rand(50000).astype(numpy.float32)
@clq.fn
def ew_add(a, b, dest):
gid = get_global_id(0)
dest[gid] = a[gid] + b[gid]
ctx = cl.ctx = cl.Context.for_device(0, 0)
a_buf = ctx.to_device(a)
b_buf = ctx.to_device(b)
dest_buf = ctx.alloc(like=a)
ew_add(a_buf, b_buf, dest_buf, global_size=a.shape, local_size=(1,)).wait()
c = ctx.from_device(dest_buf)
print la.norm(c - (a + b))
| cyrus-/ace | tests/ew_add.py | Python | lgpl-3.0 | 596 |
# coding=utf-8
# Copyright 2020 Google LLC..
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| google/shopping-markup | plugins/cloud_utils/__init__.py | Python | apache-2.0 | 1,168 |
# Copyright (C) 2015 Brent Baude <[email protected]>
# Copyright (C) 2015 Red Hat Inc., Durham, North Carolina.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with openscap-daemon. If not, see <http://www.gnu.org/licenses/>.
import os
import collections
import time
import logging
import subprocess
import xml.etree.ElementTree as ET
import platform
import sys
import bz2
from threading import Lock
if sys.version_info < (3,):
from StringIO import StringIO
else:
from io import StringIO
class Scan(object):
# Fix race-condition in atomic mount/unmount
# We don't want to do mount and unmount simultaneously
_mount_lock = Lock()
def __init__(self, image_uuid, con_uuids, output, appc, mnt_dir="/tmp"):
self.mnt_dir = mnt_dir
self.image_name = image_uuid
self.ac = appc
self.CVEs = collections.namedtuple('CVEs', 'title, severity,'
'cve_ref_id, cve_ref_url,'
'rhsa_ref_id, rhsa_ref_url')
self.list_of_CVEs = []
self.con_uuids = con_uuids
self.output = output
self.report_dir = os.path.join(self.ac.workdir, "reports")
if not os.path.exists(self.report_dir):
os.mkdir(self.report_dir)
start = time.time()
from Atomic.mount import DockerMount
self.DM = DockerMount(self.mnt_dir, mnt_mkdir=True)
with Scan._mount_lock:
self.dm_results = self.DM.mount(image_uuid)
logging.debug("Created scanning chroot in {0}"
" seconds".format(time.time() - start))
self.dest = self.dm_results
def get_release(self):
etc_release_path = os.path.join(self.dest, "rootfs",
"etc/redhat-release")
if not os.path.exists(etc_release_path):
logging.info("{0} is not RHEL based".format(self.image_name))
return False
self.os_release = open(etc_release_path).read()
rhel = 'Red Hat Enterprise Linux'
if rhel in self.os_release:
logging.debug("{0} is {1}".format(self.image_name,
self.os_release.rstrip()))
return True
else:
logging.info("{0} is {1}".format(self.image_name,
self.os_release.rstrip()))
return False
def scan(self):
logging.debug("Scanning chroot {0}".format(self.image_name))
hostname = open("/etc/hostname").read().rstrip()
os.environ["OSCAP_PROBE_ARCHITECTURE"] = platform.processor()
os.environ["OSCAP_PROBE_ROOT"] = os.path.join(self.dest, "rootfs")
os.environ["OSCAP_PROBE_OS_NAME"] = platform.system()
os.environ["OSCAP_PROBE_OS_VERSION"] = platform.release()
os.environ["OSCAP_PROBE_"
"PRIMARY_HOST_NAME"] = "{0}:{1}".format(hostname,
self.image_name)
from oscap_docker_python.get_cve_input import getInputCVE
# We only support RHEL 6|7 in containers right now
osc = getInputCVE("/tmp")
if "Red Hat Enterprise Linux" in self.os_release:
if "7." in self.os_release:
self.chroot_cve_file = os.path.join(
self.ac.workdir, osc.dist_cve_name.format("7"))
if "6." in self.os_release:
self.chroot_cve_file = os.path.join(
self.ac.workdir, osc.dist_cve_name.format("6"))
cmd = ['oscap', 'oval', 'eval', '--report',
os.path.join(self.report_dir,
self.image_name + '.html'),
'--results',
os.path.join(self.report_dir,
self.image_name + '.xml'), self.chroot_cve_file]
logging.debug(
"Starting evaluation with command '%s'.",
" ".join(cmd))
try:
self.result = subprocess.check_output(cmd).decode("utf-8")
except Exception:
pass
# def capture_run(self, cmd):
# '''
# Subprocess command that captures and returns the output and
# return code.
# '''
# r = subprocess.Popen(cmd, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# return r.communicate(), r.returncode
def get_cons(self, fcons, short_iid):
cons = []
for image in fcons:
if image.startswith(short_iid):
for con in fcons[image]:
cons.append(con['uuid'][:12])
return cons
def report_results(self):
if not os.path.exists(self.chroot_cve_file):
from openscap_daemon.cve_scanner.scanner_error import ImageScannerClientError
raise ImageScannerClientError("Unable to find {0}"
.format(self.chroot_cve_file))
return False
cve_tree = ET.parse(bz2.BZ2File(self.chroot_cve_file))
self.cve_root = cve_tree.getroot()
for line in self.result.splitlines():
split_line = line.split(':')
            # Not in love with how I did this.
            # Should find a better marker to know whether a line
            # is parsable.
if (len(split_line) == 5) and ('true' in split_line[4]):
self._return_xml_values(line.split()[1][:-1])
sev_dict = {}
sum_log = StringIO()
sum_log.write("Image: {0} ({1})".format(self.image_name,
self.os_release))
cons = self.get_cons(self.ac.fcons, self.image_name)
sum_log.write("\nContainers based on this image ({0}): {1}\n"
.format(len(cons), ", ".join(cons)))
for sev in ['Critical', 'Important', 'Moderate', 'Low']:
sev_counter = 0
for cve in self.list_of_CVEs:
if cve.severity == sev:
sev_counter += 1
sum_log.write("\n")
fields = list(self.CVEs._fields)
fields.remove('title')
sum_log.write("{0}{1}: {2}\n"
.format(" " * 5, "Title",
getattr(cve, "title")))
for field in fields:
sum_log.write("{0}{1}: {2}\n"
.format(" " * 10, field.title(),
getattr(cve, field)))
sev_dict[sev] = sev_counter
self.output.list_of_outputs.append(
self.output.output(iid=self.image_name, cid=self.con_uuids,
os=self.os_release, sevs=sev_dict,
log=sum_log.getvalue(), msg=None))
sum_log.close()
def _report_not_rhel(self, image):
msg = "{0} is not based on RHEL".format(image[:8])
self.output.list_of_outputs.append(
self.output.output(iid=image, cid=None,
os=None, sevs=None,
log=None, msg=msg))
def _return_xml_values(self, cve):
cve_string = ("{http://oval.mitre.org/XMLSchema/oval-definitions-5}"
"definitions/*[@id='%s']" % cve)
cve_xml = self.cve_root.find(cve_string)
title = cve_xml.find("{http://oval.mitre.org/XMLSchema/oval-"
"definitions-5}metadata/"
"{http://oval.mitre.org/XMLSchema/"
"oval-definitions-5}title")
cve_id = cve_xml.find("{http://oval.mitre.org/XMLSchema/"
"oval-definitions-5}metadata/{http://oval.mitre."
"org/XMLSchema/oval-definitions-5}reference"
"[@source='CVE']")
sev = (cve_xml.find("{http://oval.mitre.org/XMLSchema/oval-definitions"
"-5}metadata/{http://oval.mitre.org/XMLSchema/oval"
"-definitions-5}advisory/")).text
if cve_id is not None:
cve_ref_id = cve_id.attrib['ref_id']
cve_ref_url = cve_id.attrib['ref_url']
else:
cve_ref_id = None
cve_ref_url = None
rhsa_id = cve_xml.find("{http://oval.mitre.org/XMLSchema/oval-"
"definitions-5}metadata/{http://oval.mitre.org"
"/XMLSchema/oval-definitions-5}reference"
"[@source='RHSA']")
if rhsa_id is not None:
rhsa_ref_id = rhsa_id.attrib['ref_id']
rhsa_ref_url = rhsa_id.attrib['ref_url']
else:
rhsa_ref_id = None
rhsa_ref_url = None
self.list_of_CVEs.append(
self.CVEs(title=title.text, cve_ref_id=cve_ref_id,
cve_ref_url=cve_ref_url, rhsa_ref_id=rhsa_ref_id,
rhsa_ref_url=rhsa_ref_url, severity=sev))
def _get_rpms(self):
# TODO: External dep!
import rpm
chroot_os = os.path.join(self.dest, "rootfs")
ts = rpm.TransactionSet(chroot_os)
ts.setVSFlags((rpm._RPMVSF_NOSIGNATURES | rpm._RPMVSF_NODIGESTS))
image_rpms = []
for hdr in ts.dbMatch(): # No sorting
if hdr['name'] == 'gpg-pubkey':
continue
else:
foo = "{0}-{1}-{2}-{3}-{4}".format(hdr['name'],
hdr['epochnum'],
hdr['version'],
hdr['release'],
hdr['arch'])
image_rpms.append(foo)
return image_rpms
def unmount(self):
with Scan._mount_lock:
self.DM.unmount_path(self.dest)
self.DM._clean_temp_container_by_path(self.dest)
os.rmdir(self.dest)
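# Illustrative lifecycle of a Scan (not part of the original module; the
# argument objects are hypothetical):
#
#   scan = Scan(image_uuid, con_uuids, output, appc)
#   if scan.get_release():       # only RHEL-based images are scanned
#       scan.scan()
#       scan.report_results()
#   scan.unmount()               # always release the mount when done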
| pthierry38/openscap-daemon | openscap_daemon/cve_scanner/scan.py | Python | lgpl-2.1 | 10,634 |
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class SoftwareInfoList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
SoftwareInfoList - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'software': 'list[SoftwareInfo]'
}
self.attribute_map = {
'software': 'software'
}
self._software = None
@property
def software(self):
"""
Gets the software of this SoftwareInfoList.
:return: The software of this SoftwareInfoList.
:rtype: list[SoftwareInfo]
"""
return self._software
@software.setter
def software(self, software):
"""
Sets the software of this SoftwareInfoList.
:param software: The software of this SoftwareInfoList.
:type: list[SoftwareInfo]
"""
self._software = software
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| realms-team/basestation-fw | libs/smartmeshsdk-REL-1.3.0.1/libs/VManagerSDK/vmanager/models/software_info_list.py | Python | bsd-3-clause | 3,079 |
#!/usr/bin/env python2
#
# Copyright 2015 by Ss Cyril and Methodius University in Skopje, Macedonia
# Copyright 2015 by Idiap Research Institute in Martigny, Switzerland
#
# See the file COPYING for the licence associated with this software.
#
# Author(s):
# Branislav Gerazov, October 2015
# Aleksandar Gjoreski, October 2015
# Pierre-Edouard Honnet, October 2015
#
import numpy as np
import numpy.linalg as linalg
import math
class Atom:
"""
    A superclass for all types of atoms. The method get_curve is common to
    all atom classes and has to be implemented by each subclass, so that
    operations can be performed on an atom without knowing its class.
"""
def __init__(self, curve, fs=None, amp=1, position=0,
pitch_max_in=None, pitch_max_out=None, phrase_max_out=None):
self.curve = curve
self.fs = fs
self.amp = amp
self.position = position
self.pitch_max_in = pitch_max_in
self.pitch_max_out = pitch_max_out
self.phrase_max_out = phrase_max_out
def get_curve(self):
if self.curve is not None:
return self.curve
else:
raise Exception("Function not generated")
def generate_curve(self):
if self.curve is None:
self.curve = self.get_curve()
def regenerate_curve(self):
self.curve = None
self.curve = self.get_curve()
def get_padded_curve(self, wanted_len, just_sign=False, include_amplitude=True):
self.generate_curve()
position = self.position
curve = self.curve
curve_len = len(self.curve)
# Crop if position is negative
if position < 0:
crop_len = -position
curve = curve[crop_len:]
position = 0
curve_len = len(curve)
pre_pad = position
if position + curve_len <= wanted_len:
post_pad = wanted_len - (position + curve_len)
elif pre_pad > wanted_len:
print 'WARNING: position %d > wanted_len %d while padding the atom.' % (position, wanted_len)
return np.zeros(wanted_len)
else:
# Crop the end
post_pad = 0
crop_len = position + curve_len - wanted_len
curve = curve[:-crop_len]
padded_curve = np.pad(curve, (pre_pad, post_pad), 'constant')
if just_sign:
padded_curve *= np.sign(self.amp)
return padded_curve
# Apply the gain:
if include_amplitude:
if not just_sign:
padded_curve *= self.amp
return padded_curve
def get_peak_position(self, wanted_len):
""" May be rewritten not to use the get_padded_curve() method."""
curve = self.get_padded_curve(wanted_len)
return np.argmax(curve)
class GammaAtom(Atom):
def __init__(self, k, theta, fs, amp=1, position=0, length=None):
self.curve = None
self.k = k
self.theta = theta
self.fs = fs
self.amp = amp
self.position = position
self.length = length
self.curve = self.get_curve()
def get_curve(self):
# If is already computed just return it
if self.curve is not None:
return self.curve
        length = 20 # maximum length in sec - see later if this needs to be calculated differently
k = self.k
theta = self.theta
# This is our time vector (just the length of the gamma atom):
t = np.linspace(0, length, length*self.fs, endpoint=False)
# np.vectorize is not really vectorized, it's just nicer way to loop
gamma_function = np.vectorize(lambda tt:
1/(math.gamma(k)*theta**k)*tt**(k-1)*math.exp(-tt/theta))
gamma_atom = gamma_function(t)
# # Now shorten the atoms
thresh = 1e-5 # don't go above 1e-5 because atoms will be shorter than f0 (for Macedonian sentence at least :)
gamma_atom_th_ind = np.where(gamma_atom > thresh) # indexes of elements above thresh
gamma_atom = gamma_atom[gamma_atom_th_ind]
gamma_atom -= np.min(gamma_atom)
gamma_atom /= linalg.norm(gamma_atom) # norm-2 of 1
self.length = len(gamma_atom)
return gamma_atom
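# Illustrative usage (not part of the original module; all parameter values
# below are made up): a gamma atom with shape k=6 and scale theta=0.01,
# sampled at 200 Hz, scaled and placed onto a 500-sample pitch contour.
#
#   atom = GammaAtom(k=6, theta=0.01, fs=200, amp=0.5, position=100)
#   contour = atom.get_padded_curve(wanted_len=500)
#   peak = atom.get_peak_position(wanted_len=500)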
class mutantGammaAtom(Atom):
def __init__(self, k, theta, theta_up, fs, amp=1, position=0, length=None):
self.curve = None
self.k = k
self.theta = theta
self.fs = fs
self.amp = amp
self.position = position
self.length = length
self.theta_up = theta_up
self.curve = self.get_curve()
def get_curve(self):
# If is already computed just return it
if self.curve is not None:
return self.curve
        length = 20 # maximum length in sec - see later if this needs to be calculated differently
k = self.k
theta = self.theta
theta_up = self.theta_up
# This is our time vector (just the length of the gamma atom):
t = np.linspace(0, length, length*self.fs, endpoint=False)
# np.vectorize is not really vectorized, it's just nicer way to loop
gamma_function_up = np.vectorize(lambda tt: 1/(math.gamma(k)*theta_up**k)*tt**(k-1)*math.exp(-tt/theta_up))
gamma_function_down = np.vectorize(lambda tt: 1/(math.gamma(k)*theta**k)*tt**(k-1)*math.exp(-tt/theta))
gamma_atom_up = gamma_function_up(t)
gamma_atom_down = gamma_function_down(t)
# stick them together : )
gamma_atom_up = gamma_atom_up[:np.argmax(gamma_atom_up)] / np.max(gamma_atom_up)
gamma_atom_down = gamma_atom_down[np.argmax(gamma_atom_down):] / np.max(gamma_atom_down)
gamma_atom = np.concatenate((gamma_atom_up, gamma_atom_down))
        gamma_atom /= linalg.norm(gamma_atom) # normalise in place to norm-2 of 1
return gamma_atom
| dipteam/wcad | wcad/object_types/atom.py | Python | gpl-3.0 | 5,941 |
# Django settings for example project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'test.db' # Or path to database file if using sqlite3.
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
MEDIA_ROOT = ''
MEDIA_URL = ''
ADMIN_MEDIA_PREFIX = '/media/'
SECRET_KEY = 's!ro3_a+b=j^0wvy7-r5frvd)ls6z61!2qs-^&-v&!5-9uns@-'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'example.urls'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'reportengine',
'example_reports',
'djcelery',
'djkombu',
)
ASYNC_REPORTS=True
BROKER_TRANSPORT = "djkombu.transport.DatabaseTransport"
| jrutila/django-reportengine | example/settings.py | Python | bsd-3-clause | 1,333 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
import uuid
from .. import models
class PolicyAssignmentsOperations(object):
"""PolicyAssignmentsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the operation. Constant value: "2016-12-01".
"""
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2016-12-01"
self.config = config
def delete(
self, scope, policy_assignment_name, custom_headers=None, raw=False, **operation_config):
"""Deletes a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to
delete.
:type policy_assignment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{scope}/providers/Microsoft.Authorization/policyassignments/{policyAssignmentName}'
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'policyAssignmentName': self._serialize.url("policy_assignment_name", policy_assignment_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create(
self, scope, policy_assignment_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates a policy assignment.
Policy assignments are inherited by child resources. For example, when
        you apply a policy to a resource group, that policy is assigned to all
resources in the group.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment.
:type policy_assignment_name: str
:param parameters: Parameters for the policy assignment.
:type parameters: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{scope}/providers/Microsoft.Authorization/policyassignments/{policyAssignmentName}'
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'policyAssignmentName': self._serialize.url("policy_assignment_name", policy_assignment_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'PolicyAssignment')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
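    # Illustrative call (not part of the generated client); assumes an
    # already-configured PolicyClient instance named `client` and a
    # pre-built PolicyAssignment named `parameters`, both hypothetical:
    #
    #   assignment = client.policy_assignments.create(
    #       scope='/subscriptions/<subscription-id>',
    #       policy_assignment_name='enforce-tags',
    #       parameters=parameters)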
def get(
self, scope, policy_assignment_name, custom_headers=None, raw=False, **operation_config):
"""Gets a policy assignment.
:param scope: The scope of the policy assignment.
:type scope: str
:param policy_assignment_name: The name of the policy assignment to
get.
:type policy_assignment_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{scope}/providers/Microsoft.Authorization/policyassignments/{policyAssignmentName}'
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'policyAssignmentName': self._serialize.url("policy_assignment_name", policy_assignment_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def list_for_resource_group(
self, resource_group_name, filter=None, custom_headers=None, raw=False, **operation_config):
"""Gets policy assignments for the resource group.
:param resource_group_name: The name of the resource group that
contains policy assignments.
:type resource_group_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignmentPaged
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentPaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/policyAssignments'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str', skip_quote=True)
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
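    # Illustrative iteration (not part of the generated client): the
    # returned PolicyAssignmentPaged object is iterable and fetches
    # further pages lazily; the client name is hypothetical:
    #
    #   for assignment in client.policy_assignments.list_for_resource_group('my-rg'):
    #       print(assignment.name)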
def list_for_resource(
self, resource_group_name, resource_provider_namespace, parent_resource_path, resource_type, resource_name, filter=None, custom_headers=None, raw=False, **operation_config):
"""Gets policy assignments for a resource.
:param resource_group_name: The name of the resource group containing
the resource. The name is case insensitive.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource
provider.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource path.
:type parent_resource_path: str
:param resource_type: The resource type.
:type resource_type: str
:param resource_name: The name of the resource with policy
assignments.
:type resource_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignmentPaged
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentPaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/policyassignments'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern='^[-\w\._\(\)]+$'),
'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str'),
'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True),
'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list(
self, filter=None, custom_headers=None, raw=False, **operation_config):
"""Gets all the policy assignments for a subscription.
:param filter: The filter to apply on the operation.
:type filter: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignmentPaged
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignmentPaged>`
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/policyassignments'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.PolicyAssignmentPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def delete_by_id(
self, policy_assignment_id, custom_headers=None, raw=False, **operation_config):
"""Deletes a policy assignment by ID.
        When providing a scope for the assignment, use
'/subscriptions/{subscription-id}/' for subscriptions,
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}'
for resource groups, and
'/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
for resources.
:param policy_assignment_id: The ID of the policy assignment to
delete. Use the format
'/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
:type policy_assignment_id: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:rtype: :class:`PolicyAssignment
<azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/{policyAssignmentId}'
path_format_arguments = {
'policyAssignmentId': self._serialize.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('PolicyAssignment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
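
    # --- Illustrative usage (sketch, not generated code) --------------------
    # Deletes an assignment addressed by its fully qualified ID, built in the
    # format given in the docstring above; `client`, the subscription id and
    # the assignment name are hypothetical placeholders:
    #
    #     assignment_id = (
    #         '/subscriptions/00000000-0000-0000-0000-000000000000'
    #         '/providers/Microsoft.Authorization/policyAssignments/example-assignment')
    #     deleted = client.policy_assignments.delete_by_id(assignment_id)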

    def create_by_id(
            self, policy_assignment_id, parameters, custom_headers=None, raw=False, **operation_config):
        """Creates a policy assignment by ID.

        Policy assignments are inherited by child resources. For example, when
        you apply a policy to a resource group, that policy is assigned to all
        resources in the group. When providing a scope for the assignment, use
        '/subscriptions/{subscription-id}/' for subscriptions,
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}'
        for resource groups, and
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
        for resources.

        :param policy_assignment_id: The ID of the policy assignment to
         create. Use the format
         '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
        :type policy_assignment_id: str
        :param parameters: Parameters for policy assignment.
        :type parameters: :class:`PolicyAssignment
         <azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`PolicyAssignment
         <azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/{policyAssignmentId}'
        path_format_arguments = {
            'policyAssignmentId': self._serialize.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct body
        body_content = self._serialize.body(parameters, 'PolicyAssignment')

        # Construct and send request
        request = self._client.put(url, query_parameters)
        response = self._client.send(
            request, header_parameters, body_content, **operation_config)

        if response.status_code not in [201]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 201:
            deserialized = self._deserialize('PolicyAssignment', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
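
    # --- Illustrative usage (sketch, not generated code) --------------------
    # Creates an assignment at an explicit scope. The PolicyAssignment model
    # fields shown (display_name, policy_definition_id) come from
    # azure.mgmt.resource.policy.v2016_12_01.models; the definition ID and
    # `assignment_id` are hypothetical placeholders:
    #
    #     from azure.mgmt.resource.policy.v2016_12_01 import models
    #     parameters = models.PolicyAssignment(
    #         display_name='Example assignment',
    #         policy_definition_id='/providers/Microsoft.Authorization'
    #                              '/policyDefinitions/example-definition')
    #     created = client.policy_assignments.create_by_id(assignment_id, parameters)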

    def get_by_id(
            self, policy_assignment_id, custom_headers=None, raw=False, **operation_config):
        """Gets a policy assignment by ID.

        When providing a scope for the assignment, use
        '/subscriptions/{subscription-id}/' for subscriptions,
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}'
        for resource groups, and
        '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider-namespace}/{resource-type}/{resource-name}'
        for resources.

        :param policy_assignment_id: The ID of the policy assignment to get.
         Use the format
         '/{scope}/providers/Microsoft.Authorization/policyAssignments/{policy-assignment-name}'.
        :type policy_assignment_id: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :rtype: :class:`PolicyAssignment
         <azure.mgmt.resource.policy.v2016_12_01.models.PolicyAssignment>`
        :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
         if raw=true
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/{policyAssignmentId}'
        path_format_arguments = {
            'policyAssignmentId': self._serialize.url("policy_assignment_id", policy_assignment_id, 'str', skip_quote=True)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)

        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None

        if response.status_code == 200:
            deserialized = self._deserialize('PolicyAssignment', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
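
    # --- Illustrative usage (sketch, not generated code) --------------------
    # Retrieves an assignment by the same hypothetical `assignment_id` used in
    # the sketches above and reads a couple of fields; a CloudError is raised
    # if the ID does not resolve to an assignment:
    #
    #     assignment = client.policy_assignments.get_by_id(assignment_id)
    #     print(assignment.display_name, assignment.scope)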
| SUSE/azure-sdk-for-python | azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/operations/policy_assignments_operations.py | Python | mit | 30,831 |