| code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M) |
|---|---|---|---|---|---|
"""
1. Subset(second le)
2. LCS
Given two strings 'X' and 'Y', find the length of the longest common substring.
For example, if the given strings are "GeeksforGeeks" and "GeeksQuiz",
the output should be 5 as longest common substring is "Geeks"
[Solution](http://www.geeksforgeeks.org/longest-common-substring/)
The DP solution is O(n*m) in time and space (n = len(a), m = len(b)).
A [naive](http://www.geeksforgeeks.org/searching-for-patterns-set-2-kmp-algorithm/) approach would be O(n * m^2):
for each of the O(m^2) substrings of str2, search for it in str1 in O(n) (e.g. with KMP),
giving O(n * m^2) overall.
1. dp[i][j] is the length of the longest common suffix of a[:i] and b[:j],
   i.e. the longest common substring ending at a[i-1] and b[j-1]
2. init dp[i][j] = 0
3. dp[i][j] = dp[i-1][j-1] + 1  # if a[i-1] == b[j-1]
            = 0                 # if a[i-1] != b[j-1]
4. answer = max(dp[0...M][0...N])
"""
def Longest_Common_Substring(a,b):
lengtha = len(a)
lengthb = len(b)
dp = [[0 for j in xrange(lengthb+1)] for i in xrange(lengtha+1)]
maxlength = 0
res = (maxlength, None)
for i in xrange(1,lengtha+1):
for j in xrange(1,lengthb+1):
if a[i-1] == b[j-1]:
dp[i][j] = dp[i-1][j-1]+1
if dp[i][j] > maxlength:
maxlength = dp[i][j]
res = (dp[i][j],i)
else:
dp[i][j] = 0
    if res[1] is None:  # the strings share no character at all
        return 0, ''
    word = []
    i = res[1]
    print "i: ", i
    for j in xrange(i-res[0], i):
        word.append(a[j])
    string = ''.join(word)
    print "word: ", string
    print "maxlength: ", maxlength
    return maxlength, string
print Longest_Common_Substring("abc", "abz")
print Longest_Common_Substring("abcdefgabyzzkabcde", "zzzzzzgabyzzabcabcdefg")
print Longest_Common_Substring("GeeksforGeeks", "GeeksQuiz")
| UmassJin/Leetcode | Experience/Longest_Common_Substring.py | Python | mit | 1,722 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lib.actions import OrionBaseAction
class NodeDiscoverAndAddInterfaces(OrionBaseAction):
def run(self, node, platform, whitelist=[], blacklist=[]):
"""
Discover and add interfaces on an Orion node
"""
results = {'added': [], 'existing': []}
self.connect(platform)
NodeId = self.get_node_id(node)
        discovered_interfaces = self.invoke('Orion.NPM.Interfaces',
                                            'DiscoverInterfacesOnNode',
                                            NodeId)
        add_interfaces = []
        for interface in discovered_interfaces['DiscoveredInterfaces']:
# Unmonitored interfaces have an InterfaceID of 0.
if not interface['InterfaceID'] == 0:
self.logger.info("Skipping {} as monitored (I:{})".format(
interface['Caption'],
interface['InterfaceID']))
results['existing'].append(
{interface['Caption']: interface['InterfaceID']})
continue
if interface['Caption'] in blacklist:
self.logger.info("Skipping {} as in blacklist".format(
interface['Caption']))
continue
elif interface['Caption'] in whitelist:
self.logger.info("Adding {} as in whitelist".format(
interface['Caption']))
add_interfaces.append(interface)
elif not whitelist:
add_interfaces.append(interface)
else:
continue
additions = self.invoke('Orion.NPM.Interfaces',
'AddInterfacesOnNode',
NodeId,
add_interfaces,
'AddDefaultPollers')
for i in additions['DiscoveredInterfaces']:
results['added'].append({i['Caption']: i['InterfaceID']})
return results
| pidah/st2contrib | packs/orion/actions/node_discover_and_add_interfaces.py | Python | apache-2.0 | 2,765 |
from setuptools import setup
setup(
name='corputil',
version='0.7.0',
packages=['corputil'],
package_data={'corputil': ['stopwords/*.txt']},
install_requires=['nltk'],
url='',
license='',
author='Sascha Can',
author_email='[email protected]',
description=''
)
| Saytiras/corputil | setup.py | Python | mit | 305 |
#!/usr/bin/env python
command = oslc("../common/shaders/testpnoise.osl")
command += testshade("-g 512 512 -od uint8 -o Cout out.tif -param noisename perlin testpnoise")
outputs = [ "out.txt", "out.tif" ]
| rlmh/OpenShadingLanguage | testsuite/pnoise-perlin/run.py | Python | bsd-3-clause | 205 |
#!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for unittesting the utils module"""
import errno
import fcntl
import glob
import os
import os.path
import re
import shutil
import signal
import socket
import stat
import tempfile
import time
import unittest
import warnings
import random
import operator
import testutils
from ganeti import constants
from ganeti import compat
from ganeti import utils
from ganeti import errors
from ganeti.utils import RunCmd, \
FirstFree, \
RunParts
class TestParseCpuMask(unittest.TestCase):
"""Test case for the ParseCpuMask function."""
def testWellFormed(self):
self.assertEqual(utils.ParseCpuMask(""), [])
self.assertEqual(utils.ParseCpuMask("1"), [1])
self.assertEqual(utils.ParseCpuMask("0-2,4,5-5"), [0,1,2,4,5])
def testInvalidInput(self):
for data in ["garbage", "0,", "0-1-2", "2-1", "1-a"]:
self.assertRaises(errors.ParseError, utils.ParseCpuMask, data)
class TestParseMultiCpuMask(unittest.TestCase):
"""Test case for the ParseMultiCpuMask function."""
def testWellFormed(self):
self.assertEqual(utils.ParseMultiCpuMask(""), [])
self.assertEqual(utils.ParseMultiCpuMask("1"), [[1]])
self.assertEqual(utils.ParseMultiCpuMask("0-2,4,5-5"), [[0, 1, 2, 4, 5]])
self.assertEqual(utils.ParseMultiCpuMask("all"), [[-1]])
self.assertEqual(utils.ParseMultiCpuMask("0-2:all:4,6-8"),
[[0, 1, 2], [-1], [4, 6, 7, 8]])
def testInvalidInput(self):
for data in ["garbage", "0,", "0-1-2", "2-1", "1-a", "all-all"]:
self.assertRaises(errors.ParseError, utils.ParseCpuMask, data)
class TestGetMounts(unittest.TestCase):
"""Test case for GetMounts()."""
TESTDATA = (
"rootfs / rootfs rw 0 0\n"
"none /sys sysfs rw,nosuid,nodev,noexec,relatime 0 0\n"
"none /proc proc rw,nosuid,nodev,noexec,relatime 0 0\n")
def setUp(self):
self.tmpfile = tempfile.NamedTemporaryFile()
utils.WriteFile(self.tmpfile.name, data=self.TESTDATA)
def testGetMounts(self):
self.assertEqual(utils.GetMounts(filename=self.tmpfile.name),
[
("rootfs", "/", "rootfs", "rw"),
("none", "/sys", "sysfs", "rw,nosuid,nodev,noexec,relatime"),
("none", "/proc", "proc", "rw,nosuid,nodev,noexec,relatime"),
])
class TestFirstFree(unittest.TestCase):
"""Test case for the FirstFree function"""
def test(self):
"""Test FirstFree"""
self.failUnlessEqual(FirstFree([0, 1, 3]), 2)
self.failUnlessEqual(FirstFree([]), None)
self.failUnlessEqual(FirstFree([3, 4, 6]), 0)
self.failUnlessEqual(FirstFree([3, 4, 6], base=3), 5)
self.failUnlessRaises(AssertionError, FirstFree, [0, 3, 4, 6], base=3)
class TestTimeFunctions(unittest.TestCase):
"""Test case for time functions"""
def runTest(self):
self.assertEqual(utils.SplitTime(1), (1, 0))
self.assertEqual(utils.SplitTime(1.5), (1, 500000))
self.assertEqual(utils.SplitTime(1218448917.4809151), (1218448917, 480915))
self.assertEqual(utils.SplitTime(123.48012), (123, 480120))
self.assertEqual(utils.SplitTime(123.9996), (123, 999600))
self.assertEqual(utils.SplitTime(123.9995), (123, 999500))
self.assertEqual(utils.SplitTime(123.9994), (123, 999400))
self.assertEqual(utils.SplitTime(123.999999999), (123, 999999))
self.assertRaises(AssertionError, utils.SplitTime, -1)
self.assertEqual(utils.MergeTime((1, 0)), 1.0)
self.assertEqual(utils.MergeTime((1, 500000)), 1.5)
self.assertEqual(utils.MergeTime((1218448917, 500000)), 1218448917.5)
self.assertEqual(round(utils.MergeTime((1218448917, 481000)), 3),
1218448917.481)
self.assertEqual(round(utils.MergeTime((1, 801000)), 3), 1.801)
self.assertRaises(AssertionError, utils.MergeTime, (0, -1))
self.assertRaises(AssertionError, utils.MergeTime, (0, 1000000))
self.assertRaises(AssertionError, utils.MergeTime, (0, 9999999))
self.assertRaises(AssertionError, utils.MergeTime, (-1, 0))
self.assertRaises(AssertionError, utils.MergeTime, (-9999, 0))
class FieldSetTestCase(unittest.TestCase):
"""Test case for FieldSets"""
def testSimpleMatch(self):
f = utils.FieldSet("a", "b", "c", "def")
self.failUnless(f.Matches("a"))
self.failIf(f.Matches("d"), "Substring matched")
self.failIf(f.Matches("defghi"), "Prefix string matched")
self.failIf(f.NonMatching(["b", "c"]))
self.failIf(f.NonMatching(["a", "b", "c", "def"]))
self.failUnless(f.NonMatching(["a", "d"]))
def testRegexMatch(self):
f = utils.FieldSet("a", "b([0-9]+)", "c")
self.failUnless(f.Matches("b1"))
self.failUnless(f.Matches("b99"))
self.failIf(f.Matches("b/1"))
self.failIf(f.NonMatching(["b12", "c"]))
self.failUnless(f.NonMatching(["a", "1"]))
class TestForceDictType(unittest.TestCase):
"""Test case for ForceDictType"""
KEY_TYPES = {
"a": constants.VTYPE_INT,
"b": constants.VTYPE_BOOL,
"c": constants.VTYPE_STRING,
"d": constants.VTYPE_SIZE,
"e": constants.VTYPE_MAYBE_STRING,
}
def _fdt(self, dict, allowed_values=None):
if allowed_values is None:
utils.ForceDictType(dict, self.KEY_TYPES)
else:
utils.ForceDictType(dict, self.KEY_TYPES, allowed_values=allowed_values)
return dict
def testSimpleDict(self):
self.assertEqual(self._fdt({}), {})
self.assertEqual(self._fdt({"a": 1}), {"a": 1})
self.assertEqual(self._fdt({"a": "1"}), {"a": 1})
self.assertEqual(self._fdt({"a": 1, "b": 1}), {"a":1, "b": True})
self.assertEqual(self._fdt({"b": 1, "c": "foo"}), {"b": True, "c": "foo"})
self.assertEqual(self._fdt({"b": 1, "c": False}), {"b": True, "c": ""})
self.assertEqual(self._fdt({"b": "false"}), {"b": False})
self.assertEqual(self._fdt({"b": "False"}), {"b": False})
self.assertEqual(self._fdt({"b": False}), {"b": False})
self.assertEqual(self._fdt({"b": "true"}), {"b": True})
self.assertEqual(self._fdt({"b": "True"}), {"b": True})
self.assertEqual(self._fdt({"d": "4"}), {"d": 4})
self.assertEqual(self._fdt({"d": "4M"}), {"d": 4})
self.assertEqual(self._fdt({"e": None, }), {"e": None, })
self.assertEqual(self._fdt({"e": "Hello World", }), {"e": "Hello World", })
self.assertEqual(self._fdt({"e": False, }), {"e": "", })
self.assertEqual(self._fdt({"b": "hello", }, ["hello"]), {"b": "hello"})
def testErrors(self):
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"a": "astring"})
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"b": "hello"})
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"c": True})
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"d": "astring"})
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"d": "4 L"})
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"e": object(), })
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"e": [], })
self.assertRaises(errors.TypeEnforcementError, self._fdt, {"x": None, })
self.assertRaises(errors.TypeEnforcementError, self._fdt, [])
self.assertRaises(errors.ProgrammerError, utils.ForceDictType,
{"b": "hello"}, {"b": "no-such-type"})
class TestValidateServiceName(unittest.TestCase):
def testValid(self):
testnames = [
0, 1, 2, 3, 1024, 65000, 65534, 65535,
"ganeti",
"gnt-masterd",
"HELLO_WORLD_SVC",
"hello.world.1",
"0", "80", "1111", "65535",
]
for name in testnames:
self.assertEqual(utils.ValidateServiceName(name), name)
def testInvalid(self):
testnames = [
-15756, -1, 65536, 133428083,
"", "Hello World!", "!", "'", "\"", "\t", "\n", "`",
"-8546", "-1", "65536",
(129 * "A"),
]
for name in testnames:
self.assertRaises(errors.OpPrereqError, utils.ValidateServiceName, name)
class TestReadLockedPidFile(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def testNonExistent(self):
path = utils.PathJoin(self.tmpdir, "nonexist")
self.assert_(utils.ReadLockedPidFile(path) is None)
def testUnlocked(self):
path = utils.PathJoin(self.tmpdir, "pid")
utils.WriteFile(path, data="123")
self.assert_(utils.ReadLockedPidFile(path) is None)
def testLocked(self):
path = utils.PathJoin(self.tmpdir, "pid")
utils.WriteFile(path, data="123")
fl = utils.FileLock.Open(path)
try:
fl.Exclusive(blocking=True)
self.assertEqual(utils.ReadLockedPidFile(path), 123)
finally:
fl.Close()
self.assert_(utils.ReadLockedPidFile(path) is None)
def testError(self):
path = utils.PathJoin(self.tmpdir, "foobar", "pid")
utils.WriteFile(utils.PathJoin(self.tmpdir, "foobar"), data="")
# open(2) should return ENOTDIR
self.assertRaises(EnvironmentError, utils.ReadLockedPidFile, path)
class TestFindMatch(unittest.TestCase):
def test(self):
data = {
"aaaa": "Four A",
"bb": {"Two B": True},
re.compile(r"^x(foo|bar|bazX)([0-9]+)$"): (1, 2, 3),
}
self.assertEqual(utils.FindMatch(data, "aaaa"), ("Four A", []))
self.assertEqual(utils.FindMatch(data, "bb"), ({"Two B": True}, []))
for i in ["foo", "bar", "bazX"]:
for j in range(1, 100, 7):
self.assertEqual(utils.FindMatch(data, "x%s%s" % (i, j)),
((1, 2, 3), [i, str(j)]))
def testNoMatch(self):
self.assert_(utils.FindMatch({}, "") is None)
self.assert_(utils.FindMatch({}, "foo") is None)
self.assert_(utils.FindMatch({}, 1234) is None)
data = {
"X": "Hello World",
re.compile("^(something)$"): "Hello World",
}
self.assert_(utils.FindMatch(data, "") is None)
self.assert_(utils.FindMatch(data, "Hello World") is None)
class TestTryConvert(unittest.TestCase):
def test(self):
for src, fn, result in [
("1", int, 1),
("a", int, "a"),
("", bool, False),
("a", bool, True),
]:
self.assertEqual(utils.TryConvert(fn, src), result)
class TestVerifyDictOptions(unittest.TestCase):
def setUp(self):
self.defaults = {
"first_key": "foobar",
"foobar": {
"key1": "value2",
"key2": "value1",
},
"another_key": "another_value",
}
def test(self):
some_keys = {
"first_key": "blubb",
"foobar": {
"key2": "foo",
},
}
utils.VerifyDictOptions(some_keys, self.defaults)
def testInvalid(self):
some_keys = {
"invalid_key": "blubb",
"foobar": {
"key2": "foo",
},
}
self.assertRaises(errors.OpPrereqError, utils.VerifyDictOptions,
some_keys, self.defaults)
def testNestedInvalid(self):
some_keys = {
"foobar": {
"key2": "foo",
"key3": "blibb"
},
}
self.assertRaises(errors.OpPrereqError, utils.VerifyDictOptions,
some_keys, self.defaults)
def testMultiInvalid(self):
some_keys = {
"foobar": {
"key1": "value3",
"key6": "Right here",
},
"invalid_with_sub": {
"sub1": "value3",
},
}
self.assertRaises(errors.OpPrereqError, utils.VerifyDictOptions,
some_keys, self.defaults)
if __name__ == "__main__":
testutils.GanetiTestProgram()
| sarahn/ganeti | test/py/ganeti.utils_unittest.py | Python | gpl-2.0 | 12,151 |
"""
Specific permissions for the micromasters app
"""
from rolepermissions.permissions import register_object_checker
from rolepermissions.checkers import has_permission
from roles.models import Role
from roles.roles import Permissions
@register_object_checker()
def can_advance_search(role, user, program):
"""
Determines whether a user can perform an advanced search on a specific program.
"""
return (
has_permission(user, Permissions.CAN_ADVANCE_SEARCH) and Role.objects.filter(
user=user, role=role.ROLE_ID, program=program).exists()
)
@register_object_checker()
def can_edit_financial_aid(role, user, program):
"""
Determines whether a user can access and edit financial aid requests for a specific program.
"""
return (
has_permission(user, Permissions.CAN_EDIT_FINANCIAL_AID) and Role.objects.filter(
user=user, role=role.ROLE_ID, program=program).exists()
)
| mitodl/micromasters | roles/permissions.py | Python | bsd-3-clause | 953 |
from c2corg_api.models.route import Route, RouteLocale, ROUTE_TYPE
from c2corg_api.models.document_tag import DocumentTag, DocumentTagLog
from c2corg_api.models.user import User
from c2corg_api.tests.views import BaseTestRest
from c2corg_api.views.document_tag import get_tag_relation
def has_tagged(user_id, document_id):
return get_tag_relation(user_id, document_id) is not None
class BaseDocumentTagTest(BaseTestRest):
def setUp(self): # noqa
super().setUp()
self.contributor = self.session.query(User).get(
self.global_userids['contributor'])
self.contributor2 = self.session.query(User).get(
self.global_userids['contributor2'])
self.route1 = Route(activities=['skitouring'], locales=[
RouteLocale(lang='en', title='Route1')
])
self.session.add(self.route1)
self.route2 = Route(activities=['skitouring'], locales=[
RouteLocale(lang='en', title='Route2')
])
self.session.add(self.route2)
self.route3 = Route(activities=['hiking'], locales=[
RouteLocale(lang='en', title='Route3')
])
self.session.add(self.route3)
self.session.flush()
self.session.add(DocumentTag(
user_id=self.contributor2.id,
document_id=self.route2.document_id,
document_type=ROUTE_TYPE))
self.session.flush()
class TestDocumentTagRest(BaseDocumentTagTest):
def setUp(self): # noqa
super().setUp()
self._prefix = '/tags/add'
def test_tag_unauthenticated(self):
self.app_post_json(self._prefix, {}, status=403)
def test_tag(self):
request_body = {
'document_id': self.route1.document_id
}
self.post_json_with_contributor(
self._prefix, request_body, status=200, username='contributor')
self.assertTrue(
has_tagged(self.contributor.id, self.route1.document_id))
log = self.session.query(DocumentTagLog). \
filter(DocumentTagLog.document_id == self.route1.document_id). \
filter(DocumentTagLog.user_id == self.contributor.id). \
filter(DocumentTagLog.document_type == ROUTE_TYPE). \
one()
self.assertTrue(log.is_creation)
self.post_json_with_contributor(
self._prefix, request_body, status=400, username='contributor')
self.assertNotifiedEs()
class TestDocumentUntagRest(BaseDocumentTagTest):
def setUp(self): # noqa
super().setUp()
self._prefix = '/tags/remove'
def test_untag_unauthenticated(self):
self.app_post_json(self._prefix, {}, status=403)
def test_untag(self):
request_body = {
'document_id': self.route2.document_id
}
self.assertTrue(
has_tagged(self.contributor2.id, self.route2.document_id))
self.post_json_with_contributor(
self._prefix, request_body, status=200, username='contributor2')
self.assertFalse(
has_tagged(self.contributor2.id, self.route2.document_id))
self.post_json_with_contributor(
self._prefix, request_body, status=400, username='contributor2')
log = self.session.query(DocumentTagLog). \
filter(DocumentTagLog.document_id == self.route2.document_id). \
filter(DocumentTagLog.user_id == self.contributor2.id). \
filter(DocumentTagLog.document_type == ROUTE_TYPE). \
one()
self.assertFalse(log.is_creation)
self.assertNotifiedEs()
def test_untag_not_tagged(self):
request_body = {
'document_id': self.route1.document_id
}
self.assertFalse(
has_tagged(self.contributor.id, self.route1.document_id))
self.post_json_with_contributor(
self._prefix, request_body, status=400, username='contributor')
self.assertFalse(
has_tagged(self.contributor.id, self.route1.document_id))
self.assertNotNotifiedEs()
class TestDocumentTaggedRest(BaseDocumentTagTest):
def setUp(self): # noqa
super().setUp()
self._prefix = '/tags/has'
def test_has_tagged_unauthenticated(self):
self.app.get(self._prefix + '/123', status=403)
def test_has_tagged(self):
headers = self.add_authorization_header(username='contributor2')
response = self.app.get(
'{}/{}'.format(self._prefix, self.route2.document_id),
status=200, headers=headers)
body = response.json
self.assertTrue(body['todo'])
def test_has_tagged_not(self):
headers = self.add_authorization_header(username='contributor')
response = self.app.get(
'{}/{}'.format(self._prefix, self.route1.document_id),
status=200, headers=headers)
body = response.json
self.assertFalse(body['todo'])
| c2corg/v6_api | c2corg_api/tests/views/test_document_tag.py | Python | agpl-3.0 | 4,952 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
import urlparse
import werkzeug.urls
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import osv, fields
URL_REGEX = r'(\bhref=[\'"]([^\'"]+)[\'"])'
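# Illustrative example (not from the original source): on '<a href="http://example.com/p">'
# re.findall(URL_REGEX, html) returns [('href="http://example.com/p"', 'http://example.com/p')],
# i.e. the whole href attribute and the bare URL, which send_get_mail_body() rewrites below.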
class MailMail(osv.Model):
"""Add the mass mailing campaign data to mail"""
_name = 'mail.mail'
_inherit = ['mail.mail']
_columns = {
'mailing_id': fields.many2one('mail.mass_mailing', 'Mass Mailing'),
'statistics_ids': fields.one2many(
'mail.mail.statistics', 'mail_mail_id',
string='Statistics',
),
}
def create(self, cr, uid, values, context=None):
""" Override mail_mail creation to create an entry in mail.mail.statistics """
# TDE note: should be after 'all values computed', to have values (FIXME after merging other branch holding create refactoring)
mail_id = super(MailMail, self).create(cr, uid, values, context=context)
if values.get('statistics_ids'):
mail = self.browse(cr, SUPERUSER_ID, mail_id, context=context)
for stat in mail.statistics_ids:
self.pool['mail.mail.statistics'].write(cr, uid, [stat.id], {'message_id': mail.message_id, 'state': 'outgoing'}, context=context)
return mail_id
def _get_tracking_url(self, cr, uid, mail, partner=None, context=None):
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
track_url = urlparse.urljoin(
base_url, 'mail/track/%(mail_id)s/blank.gif?%(params)s' % {
'mail_id': mail.id,
'params': werkzeug.url_encode({'db': cr.dbname})
}
)
return '<img src="%s" alt=""/>' % track_url
def _get_unsubscribe_url(self, cr, uid, mail, email_to, context=None):
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
url = urlparse.urljoin(
base_url, 'mail/mailing/%(mailing_id)s/unsubscribe?%(params)s' % {
'mailing_id': mail.mailing_id.id,
'params': werkzeug.url_encode({'db': cr.dbname, 'res_id': mail.res_id, 'email': email_to})
}
)
return url
def send_get_mail_body(self, cr, uid, ids, partner=None, context=None):
""" Override to add the tracking URL to the body and to add
Statistic_id in shorted urls """
# TDE: temporary addition (mail was parameter) due to semi-new-API
body = super(MailMail, self).send_get_mail_body(cr, uid, ids, partner=partner, context=context)
mail = self.browse(cr, uid, ids[0], context=context)
links_blacklist = ['/unsubscribe_from_list']
if mail.mailing_id and body and mail.statistics_ids:
for match in re.findall(URL_REGEX, mail.body_html):
href = match[0]
url = match[1]
if not [s for s in links_blacklist if s in href]:
new_href = href.replace(url, url + '/m/' + str(mail.statistics_ids[0].id))
body = body.replace(href, new_href)
# prepend <base> tag for images using absolute urls
domain = self.pool.get("ir.config_parameter").get_param(cr, uid, "web.base.url", context=context)
base = "<base href='%s'>" % domain
body = tools.append_content_to_html(base, body, plaintext=False, container_tag='div')
# resolve relative image url to absolute for outlook.com
def _sub_relative2absolute(match):
return match.group(1) + urlparse.urljoin(domain, match.group(2))
body = re.sub('(<img(?=\s)[^>]*\ssrc=")(/[^/][^"]+)', _sub_relative2absolute, body)
body = re.sub(r'(<[^>]+\bstyle="[^"]+\burl\(\'?)(/[^/\'][^\'")]+)', _sub_relative2absolute, body)
# generate tracking URL
if mail.statistics_ids:
tracking_url = self._get_tracking_url(cr, uid, mail, partner, context=context)
if tracking_url:
body = tools.append_content_to_html(body, tracking_url, plaintext=False, container_tag='div')
return body
def send_get_email_dict(self, cr, uid, ids, partner=None, context=None):
# TDE: temporary addition (mail was parameter) due to semi-new-API
res = super(MailMail, self).send_get_email_dict(cr, uid, ids, partner, context=context)
mail = self.browse(cr, uid, ids[0], context=context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
if mail.mailing_id and res.get('body') and res.get('email_to'):
emails = tools.email_split(res.get('email_to')[0])
email_to = emails and emails[0] or False
unsubscribe_url= self._get_unsubscribe_url(cr, uid, mail, email_to, context=context)
link_to_replace = base_url+'/unsubscribe_from_list'
if link_to_replace in res['body']:
res['body'] = res['body'].replace(link_to_replace, unsubscribe_url if unsubscribe_url else '#')
return res
def _postprocess_sent_message(self, cr, uid, mail, context=None, mail_sent=True):
if mail_sent is True and mail.statistics_ids:
self.pool['mail.mail.statistics'].write(cr, uid, [s.id for s in mail.statistics_ids], {'sent': fields.datetime.now()}, context=context)
elif mail_sent is False and mail.statistics_ids:
self.pool['mail.mail.statistics'].write(cr, uid, [s.id for s in mail.statistics_ids], {'exception': fields.datetime.now()}, context=context)
return super(MailMail, self)._postprocess_sent_message(cr, uid, mail, context=context, mail_sent=mail_sent)
| syci/OCB | addons/mass_mailing/models/mail_mail.py | Python | agpl-3.0 | 5,729 |
"""Create atomic displacements."""
# Copyright (C) 2011 Atsushi Togo
# All rights reserved.
#
# This file is part of phonopy.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of the phonopy project nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from typing import Union
import numpy as np
from phonopy.harmonic.derivative_dynmat import DerivativeOfDynamicalMatrix
from phonopy.harmonic.dynamical_matrix import DynamicalMatrix, DynamicalMatrixNAC
from phonopy.interface.vasp import write_vasp
from phonopy.phonon.degeneracy import get_eigenvectors
from phonopy.structure.cells import get_supercell
from phonopy.units import VaspToTHz
class Modulation:
"""Class to create atomic displacements."""
def __init__(
self,
dynamical_matrix: Union[DynamicalMatrix, DynamicalMatrixNAC],
dimension,
phonon_modes,
delta_q=None,
derivative_order=None,
nac_q_direction=None,
factor=VaspToTHz,
):
"""Init method."""
self._dm = dynamical_matrix
self._primitive = dynamical_matrix.primitive
self._phonon_modes = phonon_modes
self._dimension = np.array(dimension).ravel()
self._delta_q = delta_q # 1st/2nd order perturbation direction
self._nac_q_direction = nac_q_direction
self._ddm = DerivativeOfDynamicalMatrix(dynamical_matrix)
self._derivative_order = derivative_order
self._factor = factor
dim = self._get_dimension_3x3()
self._supercell = get_supercell(self._primitive, dim)
complex_dtype = "c%d" % (np.dtype("double").itemsize * 2)
self._u = np.zeros(
(len(self._phonon_modes), len(self._supercell), 3),
dtype=complex_dtype,
order="C",
)
self._eigvals = np.zeros(len(self._phonon_modes), dtype="double")
self._eigvecs = np.zeros(
(len(self._phonon_modes), len(self._primitive) * 3), dtype=complex_dtype
)
def run(self):
"""Calculate modulations."""
for i, ph_mode in enumerate(self._phonon_modes):
q, band_index, amplitude, argument = ph_mode
eigvals, eigvecs = get_eigenvectors(
q,
self._dm,
self._ddm,
perturbation=self._delta_q,
derivative_order=self._derivative_order,
nac_q_direction=self._nac_q_direction,
)
u = self._get_displacements(eigvecs[:, band_index], q, amplitude, argument)
self._u[i] = u
self._eigvecs[i] = eigvecs[:, band_index]
self._eigvals[i] = eigvals[band_index]
def get_modulated_supercells(self):
"""Return modulations."""
modulations = []
for u in self._u:
modulations.append(self._get_cell_with_modulation(u))
return modulations
def get_modulations_and_supercell(self):
"""Return modulations and perfect supercell."""
return self._u, self._supercell
def write(self, filename="MPOSCAR"):
"""Write supercells with modulations to MPOSCARs."""
deltas = []
for i, u in enumerate(self._u):
cell = self._get_cell_with_modulation(u)
write_vasp((filename + "-%03d") % (i + 1), cell, direct=True)
deltas.append(u)
sum_of_deltas = np.sum(deltas, axis=0)
cell = self._get_cell_with_modulation(sum_of_deltas)
write_vasp(filename, cell, direct=True)
no_modulations = np.zeros(sum_of_deltas.shape, dtype=complex)
cell = self._get_cell_with_modulation(no_modulations)
write_vasp(filename + "-orig", cell, direct=True)
def write_yaml(self, filename="modulation.yaml"):
"""Write modulations to file in yaml."""
self._write_yaml(filename=filename)
def _get_cell_with_modulation(self, modulation):
lattice = self._supercell.cell
positions = self._supercell.positions
positions += modulation.real
scaled_positions = np.dot(positions, np.linalg.inv(lattice))
for p in scaled_positions:
p -= np.floor(p)
cell = self._supercell.copy()
cell.scaled_positions = scaled_positions
return cell
def _get_dimension_3x3(self):
if len(self._dimension) == 3:
dim = np.diag(self._dimension)
elif len(self._dimension) == 9:
dim = np.reshape(self._dimension, (3, 3))
else:
dim = np.array(self._dimension)
if dim.shape == (3, 3):
dim = np.array(dim, dtype="intc")
else:
print("Dimension is incorrectly set. Unit cell is used.")
dim = np.eye(3, dtype="intc")
return dim
def _get_displacements(self, eigvec, q, amplitude, argument):
m = self._supercell.masses
s2u_map = self._supercell.s2u_map
u2u_map = self._supercell.u2u_map
s2uu_map = [u2u_map[x] for x in s2u_map]
spos = self._supercell.scaled_positions
dim = self._supercell.supercell_matrix
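        # Per-atom Bloch-like phase factor exp(2*pi*i * q.r) scaled by 1/sqrt(mass), used below to
        # propagate the primitive-cell eigenvector to every image atom in the supercell.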
coefs = np.exp(2j * np.pi * np.dot(np.dot(spos, dim.T), q)) / np.sqrt(m)
u = []
for i, coef in enumerate(coefs):
eig_index = s2uu_map[i] * 3
u.append(eigvec[eig_index : eig_index + 3] * coef)
u = np.array(u) / np.sqrt(len(m))
phase_factor = self._get_phase_factor(u, argument)
u *= phase_factor * amplitude
return u
def _get_phase_factor(self, modulation, argument):
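        # Rotate the complex modulation so that its largest displacement component ends up with
        # phase `argument` (given in degrees), making the user-supplied phase refer to the
        # dominant component rather than to an arbitrary overall phase.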
u = np.ravel(modulation)
index_max_elem = np.argmax(abs(u))
max_elem = u[index_max_elem]
phase_for_zero = max_elem / abs(max_elem)
phase_factor = np.exp(1j * np.pi * argument / 180) / phase_for_zero
return phase_factor
def _eigvals_to_frequencies(self, eigvals):
e = np.array(eigvals).real
return np.sqrt(np.abs(e)) * np.sign(e) * self._factor
def _write_yaml(self, filename="modulation.yaml"):
w = open(filename, "w")
primitive = self._dm.primitive
num_atom = len(primitive)
w.write("primitive_cell:\n")
self._write_cell_yaml(primitive, w)
w.write("supercell:\n")
dim = self._get_dimension_3x3()
w.write(" dimension:\n")
for v in dim:
w.write(" - [ %d, %d, %d ]\n" % tuple(v))
self._write_cell_yaml(self._supercell, w)
inv_lattice = np.linalg.inv(self._supercell.cell.T)
w.write("modulations:\n")
for u, mode in zip(self._u, self._phonon_modes):
q = mode[0]
w.write("- q-position: [ %12.7f, %12.7f, %12.7f ]\n" % tuple(q))
w.write(" band: %d\n" % (mode[1] + 1))
w.write(" amplitude: %f\n" % mode[2])
w.write(" phase: %f\n" % mode[3])
w.write(" displacements:\n")
for i, p in enumerate(u):
w.write(
" - [ %20.15f, %20.15f ] # %d x (%f)\n"
% (p[0].real, p[0].imag, i + 1, abs(p[0]))
)
w.write(
" - [ %20.15f, %20.15f ] # %d y (%f)\n"
% (p[1].real, p[1].imag, i + 1, abs(p[1]))
)
w.write(
" - [ %20.15f, %20.15f ] # %d z (%f)\n"
% (p[2].real, p[2].imag, i + 1, abs(p[2]))
)
w.write(" fractional_displacements:\n")
for i, p in enumerate(np.dot(u, inv_lattice.T)):
w.write(
" - [ %20.15f, %20.15f ] # %d a\n" % (p[0].real, p[0].imag, i + 1)
)
w.write(
" - [ %20.15f, %20.15f ] # %d b\n" % (p[1].real, p[1].imag, i + 1)
)
w.write(
" - [ %20.15f, %20.15f ] # %d c\n" % (p[2].real, p[2].imag, i + 1)
)
w.write("phonon:\n")
freqs = self._eigvals_to_frequencies(self._eigvals)
for eigvec, freq, mode in zip(self._eigvecs, freqs, self._phonon_modes):
w.write("- q-position: [ %12.7f, %12.7f, %12.7f ]\n" % tuple(mode[0]))
w.write(" band: %d\n" % (mode[1] + 1))
w.write(" amplitude: %f\n" % mode[2])
w.write(" phase: %f\n" % mode[3])
w.write(" frequency: %15.10f\n" % freq)
w.write(" eigenvector:\n")
for j in range(num_atom):
w.write(" - # atom %d\n" % (j + 1))
for k in (0, 1, 2):
val = eigvec[j * 3 + k]
w.write(
" - [ %17.14f, %17.14f ] # %f\n"
% (val.real, val.imag, np.angle(val, deg=True))
)
def _write_cell_yaml(self, cell, w):
lattice = cell.get_cell()
positions = cell.get_scaled_positions()
masses = cell.get_masses()
symbols = cell.get_chemical_symbols()
w.write(" atom_info:\n")
for m, s in zip(masses, symbols):
w.write(" - { name: %2s, mass: %10.5f }\n" % (s, m))
w.write(" reciprocal_lattice:\n")
for vec, axis in zip(np.linalg.inv(lattice), ("a*", "b*", "c*")):
w.write(" - [ %12.8f, %12.8f, %12.8f ] # %2s\n" % (tuple(vec) + (axis,)))
w.write(" real_lattice:\n")
w.write(" - [ %20.15f, %20.15f, %20.15f ]\n" % (tuple(lattice[0])))
w.write(" - [ %20.15f, %20.15f, %20.15f ]\n" % (tuple(lattice[1])))
w.write(" - [ %20.15f, %20.15f, %20.15f ]\n" % (tuple(lattice[2])))
w.write(" positions:\n")
for p in positions:
w.write(" - [ %20.15f, %20.15f, %20.15f ]\n" % (tuple(p)))
| atztogo/phonopy | phonopy/phonon/modulation.py | Python | bsd-3-clause | 11,079 |
__author__ = 'Jeddah'
__project__ = "WaterDemandModel"
import sampling_db as DB
import C_ApplianceType as Appliance
import C_UnitType as Unit
import C_ResidentialUnit as residential
import C_CommercialUnit as commercial
class WaterDemandModel():
"""
"""
__residential_units = [] # the number of residential units represented by an array. Where the integer at each index represent the number of actors in the unit. i.e. [3,4] = 2 units of 3 and 4 res actors respectively
__commercial_units = [] # the number of commercial units represented by an array. Where the integer at each index represent the number of actors in the unit. i.e. [10,15] = 2 units of 10 and 15 res actors respectively
__unit_type = str
__timeseries = {} # is the dictionary initialised at the start of each day to store the volumes of water generated by each appliance at an hourly timestep. As well as the number of actors in the building
__demand_output = {}
def __init__(self,residential_units,commercial_units):
self.__residential_units = residential_units
self.__commercial_units = commercial_units
self.__unit_type = str
self.__demand_output = {}
print(residential_units)
print((self.__residential_units))
self.__timeseries = WaterDemandModel.getInitialisedTimeseries(self)
#initialise methods:
self.initialiseTimeseries()
self.run()
def initialiseTimeseries(self):
"""
This method initialises a timeseries array for each event and places it within the dictionary called 'timeseries'. The key of the dictionary is the unit_type (from C_Unit_TYpe) e.g. "COMMERCIAL" and appliance_type (from C_Appliances()) e.g. 'toilet'.
For each appliance key an array of zeros is initialised. Where each zero represents one timestep of the timeseries. Volumes generated at different time steps can then be used to populate the initialised timeseries array.
The size of the array is dependant on the input 'elements'. Arrays for each appliance are made for all of the unit_types.
A key for the number of actors ("actors") is also initialised in this method
:return: an initialised dictionary {RESIDENTIAL:{event_type1:[0,0,0,0,0....n(elements)],event_type2: [0,0,0,0,0,0,....n(elements)],actors: 0.0}
"""
for u_type in Unit.unit_types: # for each of the unit types (e.g. residential, commercial etc.)
self.__timeseries[u_type] = {} # make a nested dictionary (e.g. timeseries = {'commercial': {} :, 'residential' :{}, }
self.__timeseries[u_type]["actors"]= 0.0 # create a key for the number of actors. Initialise it with a zero
# appliances used depend on unit type, therefore grab the correct appliance list:
if u_type == "RESIDENTIAL":
appliances = Appliance.residential_appliance_types
elif u_type == "COMMERCIAL":
appliances = Appliance.commercial_appliance_types
else:
print("error: the unit type does not exist")
# create an array of zero's for each appliance in the unit:
for a_type in appliances: # for all of the appliance types e.g. toilet, shower, etc.
array = [] # make a empty array
for i in range(24): # for each step in the timeseries of 24hrs:
array.append(0.0) # add a zero to a
self.__timeseries[u_type][a_type] = array # append it to the relevant array in the dictionary specified by unit type and appliance type.
def getInitialisedTimeseries(self):
return self.__timeseries
def createTimeSeries(self,events,unit_type,numberOfActors):
"""
This method is used to create a timeseries of water use volumes for each end use (event_type) within each unit type.
It does this by extracting the relevant information (volume, time) from the event objects generated by the relevant unit class
For each event_type a different array of water use volumes (at each hour of the day) is generated.
This information is stored in the form of a dictionary.
The method also adds the number of actors within the unit - based on inputs to the method
:param events: All the water use event objects from each appliance
:param unit_type: i.e. Comm or Res. Required for the dictionary key to attach the relevant number of actors
:param numberOfActors: number of actors (people) within the unit being modelled
:return: A filled in dictionary containing the water use volumes at each hour of the day for each appliances and the number of actors within
the unit.
"""
self.__timeseries[unit_type]["actors"] = numberOfActors # get the number of actors
for e in events: # for all of the water events generated by a household:
u_type = e.getUnitType() # get the unit type of the event (i.e is the event occurring in a residential unit or commercial unit)
# For individual event types:
volumes = self.__timeseries[u_type][e.getAppliance()] # From the dictionary, get the initialised array for the specific event_type.
# Each time it goes through the loop, it calls up the correct array (corresponding to the key),
# and adds the volume to it that is generated at a specific time using the code below.
start = e.getStartTime() # get the start time of the event
volume = e.getVolume() # get the volume of the event
            volumes[int(start)] += volume # Add the volume generated to the relevant timestep of the volumes array (whose index is the same as the start time)
def run(self):
"""
This method is used to call all relevant methods within the WaterDemandModel code. It creates water demand
timeseries for a specified number of commercial and residential units.
:return: dictionary of the water volumes (L) made at each time step for each appliance. And the number of actors in the unit.
"""
"RESIDENTIAL EVENTS"
res_name_counter = 1 # used to create a name for each unit e.g. R1, R2
res_actor_counter = 0 # represents the index of the res_unit array --> so the relevant number of people can be returned
print((self.__residential_units))
for i in range(len(self.__residential_units)): # for the number of res units specified:
# create a res unit ID R1 --> Rn
res_name = "R" + str(res_name_counter)
self.__demand_output[res_name] = {}
res_name_counter += 1
# get the number of actors in the unit from the input vector:
number_of_residential_actors = self.__residential_units[res_actor_counter]
res_actor_counter += 1
r = residential.ResidentialUnit(number_of_residential_actors) # instantiate the Residential Unit Class (with the number of actors)
all_resident_unit_events = r.getAllUnitEvents() # get all of the residential unit events
self.createTimeSeries(all_resident_unit_events,"RESIDENTIAL",number_of_residential_actors) # populate the empty dictionary with volumes for each appliance. Append the number of actors.
self.__demand_output[res_name] = self.__timeseries["RESIDENTIAL"] # append the populated dictionary to the output dictionary. Only return Residential information. Otherwise an empty dictionary for Commercial is returned as well.
"COMMERCIAL EVENTS"
com_name_counter = 1 # used to create a name for each unit e.g. C1, C2
com_actor_counter = 0 # represents the index of the res_unit array --> so the relevant number of people can be returned
for j in range(len(self.__commercial_units)): # for the number of commercial units specified:
# create a res unit ID C1 --> Cn
comm_name = "C" + str(com_name_counter)
self.__demand_output[comm_name] = {}
com_name_counter += 1
# get the number of actors in the unit from the input vector:
number_of_commercial_actors = self.__commercial_units[com_actor_counter]
com_actor_counter += 1
c = commercial.CommercialUnit(number_of_commercial_actors) # instantiate the Commercial Unit Class (with the number of actors)
all_commercial_unit_events = c.getAllUnitEvents() # get all of the commercial unit events
self.createTimeSeries(all_commercial_unit_events,"COMMERCIAL",number_of_commercial_actors) # populate the empty dictionary with volumes for each appliance. Append the number of actors.
self.__demand_output[comm_name] = self.__timeseries["COMMERCIAL"] # append the populated dictionary to the output dictionary. Only return Commercial information. Otherwise an empty dictionary for Residential is returned as well.
def getDemands(self):
return self.__demand_output
if __name__ == "__main__":
res_units = [2]
com_units = [8]
run = WaterDemandModel(res_units,com_units)
a = run.getDemands()
print((run.getDemands()))
| christianurich/DynaMind-ToolBox | DynaMind-Performance-Assessment/3rdparty/CD3Waterbalance/WaterDemandModel/C_WaterDemandModel.py | Python | gpl-2.0 | 9,765 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('Direccion', '0001_initial'),
('Telefono', '0003_auto_20150319_1627'),
('Segmento', '0003_auto_20150320_0842'),
('Condicion_pago', '0006_auto_20150325_1005'),
]
operations = [
migrations.CreateModel(
name='Email_Proveedor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('email', models.EmailField(max_length=75)),
],
options={
'verbose_name_plural': 'Email de proveedores',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Proveedor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('proveedor', models.CharField(max_length=250)),
('monto_credito', models.DecimalField(max_digits=13, decimal_places=2)),
('sitio_web', models.URLField()),
('comentarios', models.TextField()),
('adicional1', models.CharField(max_length=250)),
('adicional2', models.CharField(max_length=250)),
('adicional3', models.CharField(max_length=250)),
('activo', models.BooleanField(default=True)),
('condicion_pago', models.ForeignKey(to='Condicion_pago.Condicion_pago')),
('segmento', models.ForeignKey(to='Segmento.Segmento')),
],
options={
'verbose_name_plural': 'Proveedores',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Proveedor_Direccion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('direccion', models.OneToOneField(to='Direccion.Direccion')),
('proveedor', models.ForeignKey(to='Proveedor.Proveedor')),
],
options={
'verbose_name_plural': 'Direcciones de proveedores',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Proveedor_Telefono',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('proveedor', models.ForeignKey(to='Proveedor.Proveedor')),
('telefono', models.ForeignKey(to='Telefono.Telefono')),
],
options={
'verbose_name_plural': 'Telefonos de proveedores',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Tipo_Proveedor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tipo_proveedor', models.CharField(max_length=100)),
('adicional1', models.CharField(max_length=250)),
('adicional2', models.CharField(max_length=250)),
('adicional3', models.CharField(max_length=250)),
('adicional4', models.CharField(max_length=250)),
('activo', models.BooleanField(default=True)),
],
options={
'verbose_name_plural': 'Tipos proveedores',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='proveedor',
name='tipo_proveedor',
field=models.ForeignKey(to='Proveedor.Tipo_Proveedor'),
preserve_default=True,
),
migrations.AddField(
model_name='email_proveedor',
name='proveedor',
field=models.ForeignKey(to='Proveedor.Proveedor'),
preserve_default=True,
),
]
| jrmendozat/mtvm | Proveedor/migrations/0001_initial.py | Python | gpl-2.0 | 4,084 |
#! /usr/bin/python
#fitting parameters: minimum, initial guess and maximum values
F0 = [0.1, 0.51, 2.]
R0 = [100., 101., 102.]
gamma0 = [1., 1.2, 1.3]
Temp0 = [299., 300., 300.]
W0 = [5.2, 5.21, 5.22]
#ploting parameters
markers = 'o'
colors = 'b'
font = 45
lw = 5
mw = 15
import numpy as np
import sys
import os
import matplotlib.pyplot as plt
import matplotlib
import sys
mainpath,filename = os.path.split(os.path.realpath(__file__))
emissionpath,mainfolder = os.path.split(mainpath)
pythonpath = emissionpath + '/python'
sys.path.append(pythonpath)
import getelec_mod as gt
matplotlib.rcParams["font.family"] = 'Times New Roman'
matplotlib.rcParams["font.size"] = font
matplotlib.rcParams["axes.labelsize"] = font
matplotlib.rcParams["xtick.labelsize"] = font
matplotlib.rcParams["ytick.labelsize"] = font
matplotlib.rcParams["legend.fontsize"] = font
matplotlib.rcParams['text.latex.preamble'] = [r'\usepackage{amsmath}']
filename = sys.argv[1]
fig = plt.figure(figsize=(20,15))
ax = fig.gca()
ax.grid()
ax.set_xlabel(r"$1/F\ [ \mathrm{nm/V} ]$")
ax.set_ylabel(r"$I \ [ \mathrm{nA} ]$")
Vdata, Idata = np.loadtxt(filename,unpack=True)
xdata = 1./Vdata
ydata = np.log(Idata)
fit= gt.fitML(xdata,ydata, F0, W0, R0, gamma0, Temp0)
popt = fit.x
yopt = gt.MLplot(xdata, popt[0], popt[1], popt[2], popt[3], popt[4])
yshift = max(yopt) - max(ydata)
print 'beta = %10.3e, W = %10.3f, R = %10.3f, gamma = %10.3f, Temp = %10.3f, sigmaAeff = %10.3e' \
% (popt[0], popt[1], popt[2], popt[3], popt[4], 1e-9*np.exp(-yshift))
xth = np.linspace(min(xdata),max(xdata),100)
yth = np.exp(gt.MLplot(xth, popt[0], popt[1], popt[2], popt[3], popt[4]) - yshift)
ax.semilogy(xdata / popt[0],Idata,markers, \
label = r'Data', markersize = mw, \
mec = colors, mfc = 'none', mew = 2)
ax.semilogy(xth / popt[0],yth,c=colors, linewidth = lw, label = 'fitting')
ax.legend(loc="best")
fig.tight_layout()
plt.show()
| AndKyr/GETELEC | python/fitdata.py | Python | gpl-3.0 | 1,968 |
"""
This example opens the connection in async mode (does not work properly in Python 2.7).
"""
import os
import time
from msl.equipment import (
EquipmentRecord,
ConnectionRecord,
Backend,
)
record = EquipmentRecord(
manufacturer='Pico Technology',
model='5244B', # update for your PicoScope
serial='DY135/055', # update for your PicoScope
connection=ConnectionRecord(
backend=Backend.MSL,
address='SDK::ps5000a.dll', # update for your PicoScope
properties={'open_async': True}, # opening in async mode is done in the properties
)
)
# optional: ensure that the PicoTech DLLs are available on PATH
os.environ['PATH'] += os.pathsep + r'C:\Program Files\Pico Technology\SDK\lib'
t0 = time.time()
scope = record.connect()
while True:
now = time.time()
progress = scope.open_unit_progress()
print('Progress: {}%'.format(progress))
if progress == 100:
break
time.sleep(0.02)
print('Took {:.2f} seconds to establish a connection to the PicoScope'.format(time.time()-t0))
# flash the LED light for 5 seconds
scope.flash_led(-1)
time.sleep(5)
| MSLNZ/msl-equipment | msl/examples/equipment/picotech/picoscope/open_unit_async.py | Python | mit | 1,130 |
# test errors from bad operations (unary, binary, etc)
# unsupported unary operators
try:
~None
except TypeError:
print('TypeError')
try:
~''
except TypeError:
print('TypeError')
try:
~[]
except TypeError:
print('TypeError')
# unsupported binary operators
try:
False in True
except TypeError:
print('TypeError')
try:
1 * {}
except TypeError:
print('TypeError')
try:
1 in 1
except TypeError:
print('TypeError')
# unsupported subscription
try:
1[0] = 1
except TypeError:
print('TypeError')
try:
'a'[0] = 1
except TypeError:
print('TypeError')
try:
del 1[0]
except TypeError:
print('TypeError')
# not an iterator
try:
next(1)
except TypeError:
print('TypeError')
# must be an exception type
try:
raise 1
except TypeError:
print('TypeError')
# no such name in import
try:
from sys import youcannotimportmebecauseidontexist
except ImportError:
print('ImportError')
| bvernoux/micropython | tests/basics/op_error.py | Python | mit | 962 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Blobstore-specific Files API calls."""
from __future__ import with_statement
__all__ = ['create', 'get_blob_key', 'get_file_name']
import urllib
from google.appengine.api import datastore
from google.appengine.api import namespace_manager
from mapreduce.lib.files import file as files
from google.appengine.ext import blobstore
_BLOBSTORE_FILESYSTEM = files.BLOBSTORE_FILESYSTEM
_BLOBSTORE_DIRECTORY = '/' + _BLOBSTORE_FILESYSTEM + '/'
_BLOBSTORE_NEW_FILE_NAME = 'new'
_CREATION_HANDLE_PREFIX = 'writable:'
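# For orientation (inferred from the code below, not an official comment): a writable,
# non-finalized file name looks like '/blobstore/writable:<creation handle>', while a finalized,
# readable one produced by get_file_name() is '/blobstore/<blob key>'.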
_MIME_TYPE_PARAMETER = 'content_type'
_BLOBINFO_UPLOADED_FILENAME_PARAMETER = 'file_name'
def create(mime_type='application/octet-stream',
_blobinfo_uploaded_filename=None):
"""Create a writable blobstore file.
Args:
mime_type: Resulting blob content MIME type as string.
_blobinfo_uploaded_filename: Resulting blob's BlobInfo file name as string.
Returns:
A file name for blobstore file. This file can be opened for write
by File API open function. To read the file or obtain its blob key, finalize
it and call get_blob_key function.
"""
if not mime_type:
raise files.InvalidArgumentError('Empty mime_type')
if not isinstance(mime_type, basestring):
raise files.InvalidArgumentError('Expected string for mime_type')
params = {_MIME_TYPE_PARAMETER: mime_type}
if _blobinfo_uploaded_filename:
if not isinstance(_blobinfo_uploaded_filename, basestring):
raise files.InvalidArgumentError(
'Expected string for _blobinfo_uploaded_filename')
params[_BLOBINFO_UPLOADED_FILENAME_PARAMETER] = _blobinfo_uploaded_filename
return files._create(_BLOBSTORE_FILESYSTEM, params=params)
_BLOB_FILE_INDEX_KIND = '__BlobFileIndex__'
_BLOB_KEY_PROPERTY_NAME = 'blob_key'
def get_blob_key(create_file_name):
"""Get a blob key for finalized blobstore file.
Args:
create_file_name: Writable blobstore filename as obtained from create()
function. The file should be finalized.
Returns:
An instance of apphosting.ext.blobstore.BlobKey for corresponding blob
or None if the blob referred to by the file name is not finalized.
Raises:
mapreduce.lib.files.InvalidFileNameError if the file name is not
a valid nonfinalized blob file name.
"""
if not create_file_name:
raise files.InvalidArgumentError('Empty file name')
if not isinstance(create_file_name, basestring):
raise files.InvalidArgumentError('Expected string for file name')
if not create_file_name.startswith(_BLOBSTORE_DIRECTORY):
raise files.InvalidFileNameError(
'Filename %s passed to get_blob_key doesn\'t have prefix %s' %
(create_file_name, _BLOBSTORE_DIRECTORY))
ticket = create_file_name[len(_BLOBSTORE_DIRECTORY):]
if not ticket.startswith(_CREATION_HANDLE_PREFIX):
return blobstore.BlobKey(ticket)
blob_file_index = datastore.Get([datastore.Key.from_path(
_BLOB_FILE_INDEX_KIND, ticket, namespace='')])[0]
if blob_file_index:
blob_key_str = blob_file_index[_BLOB_KEY_PROPERTY_NAME]
results = datastore.Get([datastore.Key.from_path(
blobstore.BLOB_INFO_KIND, blob_key_str, namespace='')])
if results[0] is None:
return None
else:
query = datastore.Query(blobstore.BLOB_INFO_KIND,
{'creation_handle =': ticket},
keys_only=True,
namespace='')
results = query.Get(1)
if not results:
return None
blob_key_str = results[0].name()
return blobstore.BlobKey(blob_key_str)
def get_file_name(blob_key):
"""Get a filename to read from the blob.
Args:
blob_key: An instance of BlobKey.
Returns:
File name as string which can be used with File API to read the file.
"""
if not blob_key:
raise files.InvalidArgumentError('Empty blob key')
if not isinstance(blob_key, (blobstore.BlobKey, basestring)):
raise files.InvalidArgumentError('Expected string or blobstore.BlobKey')
return '%s%s' % (_BLOBSTORE_DIRECTORY, blob_key)
def _delete(filename):
"""Permanently delete a file.
Args:
filename: finalized file name as string.
"""
blob_key = get_blob_key(filename)
if blob_key is None:
return
blob_info = blobstore.BlobInfo.get(blob_key)
if blob_info is None:
return
blob_info.delete()
| fernandalavalle/mlab-ns | server/mapreduce/lib/files/blobstore.py | Python | apache-2.0 | 4,936 |
from lab_assistant.storage.backends.base import StorageBackend
from lab_assistant.storage.backends.null import NullBackend
from lab_assistant.storage.backends.redis import RedisBackend
from lab_assistant.tests import cases
import lab_assistant
class FakeStorage(StorageBackend):
pass
class TestGetStorage(cases.TestCase):
def test_get_default_storage(self):
storage = lab_assistant.storage.get_storage()
assert isinstance(storage, NullBackend)
lab_assistant.conf.storage.update({
'path': 'lab_assistant.storage.backends.redis.RedisBackend'
})
storage = lab_assistant.storage.get_storage()
assert isinstance(storage, RedisBackend)
def test_same_instance_returned_multiple_calls(self):
first = lab_assistant.storage.get_storage()
second = lab_assistant.storage.get_storage()
assert id(first) == id(second)
def test_get_custom_storage(self):
path = 'lab_assistant.tests.test_storage.FakeStorage'
storage = lab_assistant.storage.get_storage(path, test=True)
assert isinstance(storage, FakeStorage)
assert storage.kwargs['test']
class TestRedisBackend(cases.RedisTestCase):
storage_class = 'lab_assistant.storage.backends.redis.RedisBackend'
def storage(self):
return lab_assistant.storage.get_storage(self.storage_class)
def test_get_nonexistent(self):
assert self.storage().get(123456) is None
def test_set_and_get(self):
storage = self.storage()
storage.set(123456, 'fake_result')
assert storage.get(123456) == 'fake_result'
def test_set_and_list(self):
storage = self.storage()
storage.set(123456, 'fake_result')
assert list(storage.list()) == ['fake_result']
def test_set_and_remove(self):
storage = self.storage()
storage.set(123456, 'fake_result')
assert storage.get(123456) == 'fake_result'
storage.remove(123456)
assert storage.get(123456) is None
def test_set_and_clear_and_list(self):
storage = self.storage()
storage.set(123456, 'fake_result')
assert list(storage.list()) == ['fake_result']
storage.clear()
assert list(storage.list()) == []
def test_public_api(self):
storage = self.storage()
lab_assistant.storage.store('first', storage)
key = lab_assistant.storage.store('second', storage)
assert lab_assistant.storage.retrieve(key, storage) == 'second'
assert len(list(lab_assistant.storage.retrieve_all(storage))) == 2
lab_assistant.storage.remove(key, storage)
assert len(list(lab_assistant.storage.retrieve_all(storage))) == 1
lab_assistant.storage.clear(storage)
assert list(lab_assistant.storage.retrieve_all(storage)) == []
| joealcorn/lab_assistant | lab_assistant/tests/test_storage.py | Python | mit | 2,837 |
import pytest
from detectem.core import HarProcessor
from detectem.settings import INLINE_SCRIPT_ENTRY, MAIN_ENTRY
class TestHarProcessor:
HAR_NO_URL_REDIRECT = [
{"request": {"url": "http://domain.tld/"}, "response": {}},
{"request": {"url": "http://domain.tld/js/script.js"}, "response": {}},
]
HAR_URL_REDIRECT = [
{
"request": {"url": "http://domain.tld/"},
"response": {
"headers": [{"name": "Location", "value": "/new/default.html"}]
},
},
{"request": {"url": "http://domain.tld/new/default.html"}, "response": {}},
]
def test__set_entry_type(self):
data = {}
HarProcessor._set_entry_type(data, "marker")
assert data["detectem"]["type"] == "marker"
@pytest.mark.parametrize(
"entry,result",
[
({"response": {}}, None),
({"response": {"headers": [{"name": "any"}]}}, None),
(HAR_URL_REDIRECT[0], "/new/default.html"),
],
)
def test__get_location(self, entry, result):
assert HarProcessor._get_location(entry) == result
def test__script_to_har_entry(self):
url = "http://url"
content = "content"
entry = HarProcessor._script_to_har_entry(content, url)
assert entry["request"]["url"] == url
assert entry["response"]["url"] == url
assert entry["response"]["content"]["text"] == content
assert entry["detectem"]["type"] == INLINE_SCRIPT_ENTRY
@pytest.mark.parametrize(
"entries,index", [(HAR_NO_URL_REDIRECT, 0), (HAR_URL_REDIRECT, 1)]
)
def test_mark_entries(self, entries, index):
HarProcessor().mark_entries(entries)
assert entries[index]["detectem"]["type"] == MAIN_ENTRY
| spectresearch/detectem | tests/test_core.py | Python | mit | 1,799 |
from django.conf.urls import patterns, url
from home.views import IndexView
urlpatterns = patterns('',
url(r'^$', IndexView.as_view(), name='index'),
) | sophilabs/djangoboot | app/home/urls.py | Python | mit | 157 |
from vutman.scripts import generate_vut_to_file
def run(output_path):
generate_vut_to_file(output_path)
| daniellawrence/django-vutman | scripts/generate_vut.py | Python | mit | 110 |
"""
Trail Marker
------------
The ``trail`` marker is like the ``line`` marker, but it allows properties of
the line (such as thickness) to vary along the length of the line.
This shows a simple example of the trail mark using stock prices.
"""
# category: other charts
import altair as alt
from vega_datasets import data
stocks = data.stocks.url
alt.Chart(stocks).mark_trail().encode(
x='date:T',
y='price:Q',
size='price:Q',
color='symbol:N'
)
| ellisonbg/altair | altair/vegalite/v2/examples/trail_marker.py | Python | bsd-3-clause | 464 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v8.resources.types import webpage_view
from google.ads.googleads.v8.services.types import webpage_view_service
from .base import WebpageViewServiceTransport, DEFAULT_CLIENT_INFO
class WebpageViewServiceGrpcTransport(WebpageViewServiceTransport):
"""gRPC backend transport for WebpageViewService.
Service to manage webpage views.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = google.auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
@classmethod
def create_channel(
cls,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
scopes=scopes or cls.AUTH_SCOPES,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_webpage_view(
self,
) -> Callable[
[webpage_view_service.GetWebpageViewRequest], webpage_view.WebpageView
]:
r"""Return a callable for the get webpage view method over gRPC.
Returns the requested webpage view in full detail.
Returns:
Callable[[~.GetWebpageViewRequest],
~.WebpageView]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_webpage_view" not in self._stubs:
self._stubs["get_webpage_view"] = self.grpc_channel.unary_unary(
"/google.ads.googleads.v8.services.WebpageViewService/GetWebpageView",
request_serializer=webpage_view_service.GetWebpageViewRequest.serialize,
response_deserializer=webpage_view.WebpageView.deserialize,
)
return self._stubs["get_webpage_view"]
__all__ = ("WebpageViewServiceGrpcTransport",)
| googleads/google-ads-python | google/ads/googleads/v8/services/services/webpage_view_service/transports/grpc.py | Python | apache-2.0 | 10,127 |
from threading import Thread
from loyalty import Loyalty, TopAlters, get_top_alters, text_top_category, Entropy
from utils import HOME
import json
import os
import pandas as pd
import numpy as np
PATH = HOME + '/Dropbox'
INPUT_FRIENDS = PATH + '/LoyaltyDists/friends/'
INPUT_USERS = PATH + '/LoyaltyDists/user/'
OUTPUT_FRIENDS = PATH + '/Twitter/Raw.Distributions2.Friends/'
OUTPUT_USER = PATH + '/Twitter/Raw.Distributions2.Users/'
class Resumer(Thread):
def __init__(self, interaction, inputpath, outputpath):
super(Resumer, self).__init__()
self.interaction = interaction
self.inputpath = inputpath
self.outputpath = outputpath
def run(self):
with open(self.outputpath, 'wb') as outfile:
for _file in os.listdir(self.inputpath):
df = pd.read_csv(self.inputpath + _file)
df = df.loc[df['interaction'] == self.interaction]
counts = np.sort(df['counts'].values)[::-1]
json_line = {_file.split('.')[0]: counts.tolist()}
outfile.write(json.dumps(json_line) + '\n')
# Resumer('likes', INPUT_FRIENDS, OUTPUT_FRIENDS + 'like.jsons').run()
# Resumer('mentions', INPUT_FRIENDS, OUTPUT_FRIENDS + 'mention.jsons').run()
# Resumer('retweets', INPUT_FRIENDS, OUTPUT_FRIENDS + 'retweet.jsons').run()
# Resumer('likes', INPUT_USERS, OUTPUT_USER + 'like.jsons').run()
# Resumer('mentions', INPUT_USERS, OUTPUT_USER + 'mention.jsons').run()
# Resumer('retweets', INPUT_USERS, OUTPUT_USER + 'retweet.jsons').run()
def interactions_loyalty(path1, path2):
for filename in ['like.jsons',
'mention.jsons',
'retweet.jsons']:
in_filename = path1 + filename
out_filename = path2 + filename
Loyalty(in_filename=in_filename, out_filename=out_filename).run()
# interactions_loyalty(OUTPUT_FRIENDS, PATH + '/Twitter/Interactions.Friends.Loyalty/')
# interactions_loyalty(OUTPUT_USER, PATH + '/Twitter/Interactions.Users.Loyalty/')
def top_alters(path1, path2):
for filename in [('like.jsons', 'like.csv'),
('mention.jsons', 'mention.csv'),
('retweet.jsons', 'retweet.csv')]:
# in_filename = PATH + 'Text.Loyalty/' + filename[0]
# out_filename = PATH + 'Text.TopAlters/' + filename[1]
in_filename = PATH + path1 + filename[0]
out_filename = PATH + path2 + filename[1]
TopAlters(in_filename=in_filename, out_filename=out_filename).run()
# top_alters('/Twitter/Interactions.Friends.Loyalty/', '/Twitter/Interactions.Friends.TopAlters/')
# top_alters('/Twitter/Interactions.Users.Loyalty/', '/Twitter/Interactions.Users.TopAlters/')
def top_category(datapath):
for _file in ['like.csv',
'mention.csv',
'retweet.csv']:
total, values = get_top_alters(filename=datapath + _file)
print _file, total, ' & '.join(np.array(np.round(100.0 * np.array(values) / total, 4)).astype(str)) + '\\\\'
# print '\n'
# top_category(PATH + '/Twitter/Interactions.Friends.TopAlters/')
# print '\n'
# top_category(PATH + '/Twitter/Interactions.Users.TopAlters/')
# print '\n'
# text_top_category()
ENTROPY_PATH = HOME + '/Dropbox/Twitter/Entropy/'
BASE_PATH = HOME + '/Dropbox/Twitter/'
for f in ['like.jsons', 'mention.jsons', 'retweet.jsons']:
Entropy(BASE_PATH + 'Filtered.Distributions/' + f, ENTROPY_PATH + 'dist_' + f).run()
for f in ['like.jsons', 'mention.jsons', 'retweet.jsons']:
Entropy(BASE_PATH + 'Text.Distributions/' + f, ENTROPY_PATH + 'text_' + f).run()
for f in ['like.jsons', 'mention.jsons', 'retweet.jsons']:
Entropy(BASE_PATH + 'Raw.Distributions2.Friends/' + f, ENTROPY_PATH + 'followers_' + f).run()
for f in ['like.jsons', 'mention.jsons', 'retweet.jsons']:
Entropy(BASE_PATH + 'Raw.Distributions2.Users/' + f, ENTROPY_PATH + 'no_followers_' + f).run()
| jblupus/PyLoyaltyProject | loyalty/loyalty2.py | Python | bsd-2-clause | 3,916 |
#! /usr/bin/env python
##############################################################################
# Based on DendroPy Phylogenetic Computing Library.
#
# Copyright 2010 Jeet Sukumaran and Mark T. Holder.
# All rights reserved.
#
# See "LICENSE.txt" for terms and conditions of usage.
#
# If you use this work or any portion thereof in published work,
# please cite it as:
#
# Sukumaran, J. and M. T. Holder. 2010. DendroPy: a Python library
# for phylogenetic computing. Bioinformatics 26: 1569-1571.
#
##############################################################################
"""
Path mapping for various test resources.
"""
from peyotl.utility import pretty_timestamp, get_logger
try:
import anyjson
except:
import json
class Wrapper(object):
pass
anyjson = Wrapper()
anyjson.loads = json.loads
import codecs
import os
_LOG = get_logger(__name__)
try:
import pkg_resources
# NOTE that resource_filename can return an absolute or package-relative
# path, depending on the package/egg type! We'll try to ensure absolute
# paths in some areas below.
TESTS_DIR = pkg_resources.resource_filename("peyotl", "test")
SCRIPTS_DIR = pkg_resources.resource_filename("peyotl", os.path.join(os.pardir, "scripts"))
_LOG.debug("using pkg_resources path mapping")
except:
LOCAL_DIR = os.path.dirname(__file__)
TESTS_DIR = os.path.join(LOCAL_DIR, os.path.pardir)
PACKAGE_DIR = os.path.join(TESTS_DIR, os.path.pardir)
SCRIPTS_DIR = os.path.join(PACKAGE_DIR, os.path.pardir, "scripts")
_LOG.debug("using local filesystem path mapping")
TESTS_DATA_DIR = os.path.join(TESTS_DIR, "data")
TESTS_OUTPUT_DIR = os.path.join(TESTS_DIR, "output")
TESTS_SCRATCH_DIR = os.path.join(TESTS_DIR, "scratch")
TESTS_COVERAGE_DIR = os.path.join(TESTS_DIR, "coverage")
TESTS_COVERAGE_REPORT_DIR = os.path.join(TESTS_COVERAGE_DIR, "report")
TESTS_COVERAGE_SOURCE_DIR = os.path.join(TESTS_COVERAGE_DIR, "source")
TEST_PHYLESYSTEM_PAR = os.path.join(TESTS_DATA_DIR, 'mini_par')
TEST_PHYLESYSTEM_MIRROR_PAR = os.path.join(TEST_PHYLESYSTEM_PAR, 'mirror')
TEST_PHYLESYSTEM_TEMPLATE = os.path.join(TESTS_DATA_DIR, 'template_mini_par')
def get_test_ot_service_domains():
from peyotl.api.wrapper import get_domains_obj
return get_domains_obj() # We may need to point this at dev instances in some cases.
def get_test_repos(requested=None):
"""Returns a dict mapping a nicknam (mini_.*) to the full path to that
testing repository, if that testing repository is an existing directory.
Empty dict if peyotl is not set up for testing"""
repo_parent_path = TEST_PHYLESYSTEM_PAR
_LOG.warn("TESTING repo_parent_path:{}".format(repo_parent_path))
# NOTE that we want absolute filesystem paths for repos, so that downstream git
# actions can always find their target files regardless of --work-tree
# setting (which dictates the working directory for git ops)
if not os.path.isabs(repo_parent_path):
repo_parent_path = os.path.abspath(repo_parent_path)
_LOG.warn("ABSOLUTE repo_parent_path:{}".format(repo_parent_path))
poss = {'mini_phyl': os.path.join(repo_parent_path, 'mini_phyl'),
'mini_system': os.path.join(repo_parent_path, 'mini_system'),
'mini_collections': os.path.join(repo_parent_path, 'mini_collections'),
'mini_amendments': os.path.join(repo_parent_path, 'mini_amendments'),
}
if requested is not None:
try:
poss = {k: poss[k] for k in requested}
except KeyError:
return {}
return {k: v for k, v in poss.items() if os.path.isdir(v)}
def get_test_phylesystem_mirror_parent():
return TEST_PHYLESYSTEM_MIRROR_PAR
def get_test_phylesystem_mirror_info():
return {'push': {'parent_dir': get_test_phylesystem_mirror_parent()}}
def get_test_phylesystem():
from peyotl.phylesystem.phylesystem_umbrella import _Phylesystem
r = get_test_repos()
mi = get_test_phylesystem_mirror_info()
mi['push']['remote_map'] = {'GitHubRemote': '[email protected]:mtholder'}
return _Phylesystem(repos_dict=r, mirror_info=mi)
def all_files(prefix):
d = os.path.join(TESTS_DATA_DIR, prefix)
s = set()
for p in os.listdir(d):
fp = os.path.join(d, p)
if os.path.isfile(fp):
s.add(fp)
return s
def nexson_obj(filename):
"""Returns a dict that is the deserialized nexson object
'filename' should be the fragment of the filepath below
the nexson test dir.
"""
with nexson_file_obj(filename) as fo:
fc = fo.read()
return anyjson.loads(fc)
def nexson_file_obj(filename):
""" Returns file object.
'filename' should be the fragment of the filepath below
the nexson test dir.
"""
fp = nexson_source_path(filename=filename)
return codecs.open(fp, mode='r', encoding='utf-8')
def shared_test_dir():
return os.path.join(TESTS_DATA_DIR, "shared-api-tests")
def nexson_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(TESTS_DATA_DIR, "nexson", filename)
def nexml_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(TESTS_DATA_DIR, "nexml", filename)
def named_output_stream(filename=None, suffix_timestamp=True):
return open(named_output_path(filename=filename, suffix_timestamp=suffix_timestamp), "w")
def named_output_path(filename=None, suffix_timestamp=True):
if filename is None:
filename = ""
else:
if isinstance(filename, list):
filename = os.path.sep.join(filename)
if suffix_timestamp:
filename = "%s.%s" % (filename, pretty_timestamp(style=1))
if not os.path.exists(TESTS_OUTPUT_DIR):
os.makedirs(TESTS_OUTPUT_DIR)
return os.path.join(TESTS_OUTPUT_DIR, filename)
def script_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(SCRIPTS_DIR, filename)
def next_unique_scratch_filepath(fn):
frag = os.path.join(TESTS_SCRATCH_DIR, fn)
if os.path.exists(TESTS_SCRATCH_DIR):
if not os.path.isdir(TESTS_SCRATCH_DIR):
mf = 'Cannot create temp file "{f}" because a file "{c}" is in the way'
msg = mf.format(f=frag, c=TESTS_SCRATCH_DIR)
raise RuntimeError(msg)
else:
os.makedirs(TESTS_SCRATCH_DIR)
return next_unique_filepath(frag)
def next_unique_filepath(fp):
"""Not thread safe.
"""
if os.path.exists(fp):
ind = 0
while True:
np = '{f}.{i:d}'.format(f=fp, i=ind)
if not os.path.exists(np):
return np
ind += 1
return fp
def json_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(TESTS_DATA_DIR, "json", filename)
def collection_obj(filename):
"""Returns a dict that is the deserialized collection object
'filename' should be the fragment of the filepath below
the collection test dir.
"""
with collection_file_obj(filename) as fo:
fc = fo.read()
return anyjson.loads(fc)
def collection_file_obj(filename):
""" Returns file object.
'filename' should be the fragment of the filepath below
the collection test dir.
"""
fp = collection_source_path(filename=filename)
return codecs.open(fp, mode='r', encoding='utf-8')
def collection_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(TESTS_DATA_DIR, "collections", filename)
def amendment_obj(filename):
"""Returns a dict that is the deserialized amendment object
'filename' should be the fragment of the filepath below
the amendment test dir.
"""
with amendment_file_obj(filename) as fo:
fc = fo.read()
return anyjson.loads(fc)
def amendment_file_obj(filename):
""" Returns file object.
'filename' should be the fragment of the filepath below
the amendment test dir.
"""
fp = amendment_source_path(filename=filename)
return codecs.open(fp, mode='r', encoding='utf-8')
def amendment_source_path(filename=None):
if filename is None:
filename = ""
return os.path.join(TESTS_DATA_DIR, "amendments", filename)
| mtholder/peyotl | peyotl/test/support/pathmap.py | Python | bsd-2-clause | 8,329 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .entity_health import EntityHealth
class NodeHealth(EntityHealth):
"""Information about the health of a Service Fabric node.
:param aggregated_health_state: The HealthState representing the
aggregated health state of the entity computed by Health Manager.
The health evaluation of the entity reflects all events reported on the
entity and its children (if any).
The aggregation is done by applying the desired health policy.
. Possible values include: 'Invalid', 'Ok', 'Warning', 'Error', 'Unknown'
:type aggregated_health_state: str
:param health_events: The list of health events reported on the entity.
:type health_events: list of :class:`HealthEvent
<azure.servicefabric.models.HealthEvent>`
:param unhealthy_evaluations: The unhealthy evaluations that show why the
current aggregated health state was returned by Health Manager.
:type unhealthy_evaluations: list of :class:`HealthEvaluationWrapper
<azure.servicefabric.models.HealthEvaluationWrapper>`
:param name: Name of the node whose health information is described by
this object.
:type name: str
"""
_attribute_map = {
'aggregated_health_state': {'key': 'AggregatedHealthState', 'type': 'str'},
'health_events': {'key': 'HealthEvents', 'type': '[HealthEvent]'},
'unhealthy_evaluations': {'key': 'UnhealthyEvaluations', 'type': '[HealthEvaluationWrapper]'},
'name': {'key': 'Name', 'type': 'str'},
}
def __init__(self, aggregated_health_state=None, health_events=None, unhealthy_evaluations=None, name=None):
super(NodeHealth, self).__init__(aggregated_health_state=aggregated_health_state, health_events=health_events, unhealthy_evaluations=unhealthy_evaluations)
self.name = name
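# --- Illustrative sketch (not part of the generated model) -------------------
# A hedged example of constructing a NodeHealth by hand, e.g. for tests; in
# normal use the Service Fabric client deserializes it from the REST response.
# All values below are placeholders.
#
#   node_health = NodeHealth(
#       aggregated_health_state='Ok',
#       health_events=[],              # list of HealthEvent
#       unhealthy_evaluations=[],      # list of HealthEvaluationWrapper
#       name='Node01',
#   )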
| SUSE/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/node_health.py | Python | mit | 2,273 |
import dexml
from dexml import fields
class PatchElement(dexml.Model):
class meta:
tagname = "patch"
order_sensitive = False
name = fields.String(tagname="name")
patchName = fields.String(tagname="patchName")
patchHash = fields.String(tagname="patchHash")
targetHash = fields.String(tagname="targetHash")
targetSize = fields.String(tagname="targetSize")
targetPerm = fields.String(tagname="targetPerm")
sourceHash = fields.String(tagname="sourceHash")
package = fields.String(tagname="package")
class FileElement(dexml.Model):
class meta:
tagname = "file"
order_sensitive = False
name = fields.String(tagname="name")
package = fields.String(tagname="package")
included = fields.String(tagname="included")
fileHash = fields.String(tagname="hash", required=False)
size = fields.String(tagname="size", required=False)
permissions = fields.String(tagname="permissions", required=False)
is_main_binary = fields.String(tagname="is-main-binary", required=False)
targetLink = fields.String(tagname="target", required=False)
def __eq__(self, other):
return ((self.name == other.name) and
(self.fileHash == other.fileHash) and
(self.size == other.size) and
(self.permissions == other.permissions))
def __str__(self):
return "%s [hash: %s, size: %s, permissions: %s]" % (self.name, self.fileHash, self.size, self.permissions)
class PackageElement(dexml.Model):
class meta:
tagname = "package"
order_sensitive = False
name = fields.String(tagname="name")
fileHash = fields.String(tagname="hash")
size = fields.String(tagname="size")
class Update(dexml.Model):
class meta:
tagname = "update"
order_sensitive = False
version = fields.Integer()
targetVersion = fields.String(tagname="targetVersion")
platform = fields.String(tagname="platform")
dependencies = fields.List(fields.String(tagname="file"), tagname="dependencies")
pathprefix = fields.String(tagname="pathprefix", required=False)
install = fields.List(FileElement, tagname="install", required=False)
patches = fields.List(PatchElement, tagname="patches", required=False)
manifest = fields.List(FileElement, tagname="manifest")
packages = fields.List(PackageElement, tagname="packages")
def get_filemap(self):
return {a.name: a for a in self.manifest}
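# --- Illustrative sketch (not part of the original module) -------------------
# A hedged example of parsing a manifest with dexml; the element values are
# invented and only the required fields declared above are filled in:
#
#   xml = (
#       '<update version="3">'
#       '<targetVersion>1.2.3</targetVersion>'
#       '<platform>linux</platform>'
#       '<dependencies><file>libfoo.so</file></dependencies>'
#       '<manifest><file><name>bin/app</name><package>core</package>'
#       '<included>true</included></file></manifest>'
#       '<packages><package><name>core</name><hash>abc</hash>'
#       '<size>1024</size></package></packages>'
#       '</update>'
#   )
#   update = Update.parse(xml)
#   filemap = update.get_filemap()   # {'bin/app': <FileElement ...>}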
| RasPlex/plex-home-theatre | plex/Update-Installer/manifest-tools/update.py | Python | gpl-2.0 | 2,367 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division, unicode_literals
##
## This file is part of DaBroker, a distributed data access manager.
##
## DaBroker is Copyright © 2014 by Matthias Urlichs <[email protected]>,
## it is licensed under the GPLv3. See the file `README.rst` for details,
## including optimistic statements by the author.
##
## This paragraph is auto-generated and may self-destruct at any time,
## courtesy of "make update". The original is in ‘utils/_boilerplate.py’.
## Thus, please do not remove the next line, or insert any blank lines.
##BP
import weakref
import asyncio
import aioamqp
import functools
from .msg import _RequestMsg,PollMsg,RequestMsg,BaseMsg
from .rpc import CC_DICT,CC_DATA
from ..util import import_string
import logging
logger = logging.getLogger(__name__)
class _ch(object):
"""Helper object"""
channel = None
exchange = None
queue = None
class Connection(object):
amqp = None # connection
def __init__(self,unit):
self._loop = unit._loop
self.rpcs = {}
self.alerts = {}
self.replies = {}
self.unit = weakref.ref(unit)
cfg = unit.config['amqp']['server']
if 'connect_timeout' in cfg:
cfg['connect_timeout'] = float(cfg['connect_timeout'])
if 'ssl' in cfg and isinstance(cfg['ssl'],str):
cfg['ssl'] = cfg['ssl'].lower() == 'true'
if 'port' in cfg:
cfg['port'] = int(cfg['port'])
self.cfg = cfg
codec_type = unit.config['amqp']['codec']
if codec_type[0] == '_':
codec_type = codec_type[1:]
self.codec = import_string('dabroker.base.codec.%s.RawCodec' % (codec_type,))()
self.mime_type = "application/"+codec_type
else:
self.codec = import_string('dabroker.base.codec.%s.Codec' % (codec_type,))()
self.codec_type = codec_type
self.mime_type = "application/"+codec_type+"+dabroker"
async def connect(self):
try:
self.amqp_transport,self.amqp = await aioamqp.connect(loop=self._loop, **self.cfg)
except Exception as e:
logger.exception("Not connected to AMPQ: host=%s vhost=%s user=%s", self.cfg['host'],self.cfg['virtualhost'],self.cfg['login'])
raise
await self.setup_channels()
async def _setup_one(self,name,typ,callback=None, q=None, route_key=None, exclusive=None):
"""\
Register
"""
unit = self.unit()
cfg = unit.config['amqp']
ch = _ch()
setattr(self,name,ch)
logger.debug("setup RPC for %s",name)
ch.channel = await self.amqp.channel()
ch.exchange = cfg['exchanges'][name]
logger.debug("Chan %s: exchange %s", ch.channel,cfg['exchanges'][name])
if exclusive is None:
exclusive = (q is not None)
await ch.channel.exchange_declare(cfg['exchanges'][name], typ, auto_delete=False, passive=False)
if q is not None:
assert callback is not None
ch.queue = await ch.channel.queue_declare(cfg['queues'][name]+q, auto_delete=True, passive=False, exclusive=exclusive)
await ch.channel.basic_qos(prefetch_count=1,prefetch_size=0,connection_global=False)
logger.debug("Chan %s: read %s", ch.channel,cfg['queues'][name]+q)
await ch.channel.basic_consume(queue_name=cfg['queues'][name]+q, callback=callback)
if route_key is not None:
logger.debug("Chan %s: bind %s %s %s", ch.channel,cfg['exchanges'][name], route_key, ch.queue['queue'])
await ch.channel.queue_bind(ch.queue['queue'], cfg['exchanges'][name], routing_key=route_key)
else:
assert callback is None
logger.debug("setup RPC for %s done",name)
async def setup_channels(self):
"""Configure global channels"""
u = self.unit()
await self._setup_one("alert",'topic', self._on_alert, u.uuid)
await self._setup_one("rpc",'topic')
await self._setup_one("reply",'direct', self._on_reply, u.uuid, u.uuid)
async def _on_alert(self, channel,body,envelope,properties):
logger.debug("read alert message %s",envelope.delivery_tag)
try:
msg = self.codec.decode(body)
msg = BaseMsg.load(msg,properties)
rpc = self.alerts[msg.name]
if rpc.call_conv == CC_DICT:
a=(); k=msg.data
elif rpc.call_conv == CC_DATA:
a=(msg.data,); k={}
else:
a=(msg,); k={}
reply_to = getattr(msg, 'reply_to',None)
if reply_to:
reply = msg.make_response()
try:
reply.data = await rpc.run(*a,**k)
except Exception as exc:
logger.exception("error on alert %s: %s", envelope.delivery_tag, body)
reply.set_error(exc, rpc.name,"reply")
reply,props = reply.dump(self)
if reply == "":
reply = "0"
else:
reply = self.codec.encode(reply)
await self.reply.channel.publish(reply, self.reply.exchange, reply_to, properties=props)
else:
try:
await rpc.run(*a,**k)
except Exception as exc:
logger.exception("error on alert %s: %s", envelope.delivery_tag, body)
except Exception as exc:
logger.exception("problem with rpc %s: %s", envelope.delivery_tag, body)
await self.alert.channel.basic_reject(envelope.delivery_tag)
else:
logger.debug("ack rpc %s",envelope.delivery_tag)
await self.alert.channel.basic_client_ack(envelope.delivery_tag)
async def _on_rpc(self, rpc, channel,body,envelope,properties):
logger.debug("read rpc message %s",envelope.delivery_tag)
try:
msg = self.codec.decode(body)
msg = BaseMsg.load(msg,properties)
assert msg.name == rpc.name, (msg.name, rpc.name)
reply = msg.make_response()
try:
if rpc.call_conv == CC_DICT:
a=(); k=msg.data
elif rpc.call_conv == CC_DATA:
a=(msg.data,); k={}
else:
a=(msg,); k={}
reply.data = await rpc.run(*a,**k)
except Exception as exc:
logger.exception("error on rpc %s: %s", envelope.delivery_tag, body)
reply.set_error(exc, rpc.name,"reply")
reply,props = reply.dump(self)
if reply == "":
reply = "0"
else:
reply = self.codec.encode(reply)
await rpc.channel.publish(reply, self.reply.exchange, msg.reply_to, properties=props)
except Exception as exc:
logger.exception("problem with rpc %s: %s", envelope.delivery_tag, body)
await rpc.channel.basic_reject(envelope.delivery_tag)
else:
logger.debug("ack rpc %s",envelope.delivery_tag)
await rpc.channel.basic_client_ack(envelope.delivery_tag)
async def _on_reply(self, channel,body,envelope,properties):
logger.debug("read reply message %s",envelope.delivery_tag)
try:
msg = self.codec.decode(body)
msg = BaseMsg.load(msg,properties)
f,req = self.replies[msg.correlation_id]
try:
await req.recv_reply(f,msg)
except Exception as exc: # pragma: no cover
if not f.done():
f.set_exception(exc)
except Exception as exc:
await self.reply.channel.basic_reject(envelope.delivery_tag)
logger.exception("problem with message %s: %s", envelope.delivery_tag, body)
else:
logger.debug("ack message %s",envelope.delivery_tag)
await self.reply.channel.basic_client_ack(envelope.delivery_tag)
async def call(self,msg, timeout=None):
cfg = self.unit().config['amqp']
if timeout is None:
tn = getattr(msg,'_timer',None)
if tn is not None:
timeout = self.unit().config['amqp']['timeout'].get(tn,None)
if timeout is not None:
timeout = float(timeout)
assert isinstance(msg,_RequestMsg)
data,props = msg.dump(self)
if data == "":
data = "0"
else:
data = self.codec.encode(data)
if timeout is not None:
f = asyncio.Future(loop=self._loop)
id = msg.message_id
self.replies[id] = (f,msg)
logger.debug("Send %s to %s: %s", msg.name, cfg['exchanges'][msg._exchange], data)
await getattr(self,msg._exchange).channel.publish(data, cfg['exchanges'][msg._exchange], msg.name, properties=props)
if timeout is None:
return
try:
await asyncio.wait_for(f,timeout, loop=self._loop)
except asyncio.TimeoutError:
if isinstance(msg,PollMsg):
return msg.replies
raise # pragma: no cover
finally:
del self.replies[id]
return f.result()
async def register_rpc(self,rpc):
ch = self.rpc
cfg = self.unit().config['amqp']
assert rpc.queue is None
rpc.channel = await self.amqp.channel()
rpc.queue = await rpc.channel.queue_declare(cfg['queues']['rpc']+rpc.name.replace('.','_'), auto_delete=True, passive=False)
logger.debug("Chan %s: bind %s %s %s", ch.channel,cfg['exchanges']['rpc'], rpc.name, rpc.queue['queue'])
await rpc.channel.queue_bind(rpc.queue['queue'], cfg['exchanges']['rpc'], routing_key=rpc.name)
self.rpcs[rpc.uuid] = rpc
await rpc.channel.basic_qos(prefetch_count=1,prefetch_size=0,connection_global=False)
logger.debug("Chan %s: read %s", rpc.channel,rpc.queue['queue'])
callback=functools.partial(self._on_rpc,rpc)
callback._is_coroutine = True
await rpc.channel.basic_consume(queue_name=rpc.queue['queue'], callback=callback, consumer_tag=rpc.uuid)
async def unregister_rpc(self,rpc):
ch = self.rpc
cfg = self.unit().config['amqp']
if isinstance(rpc,str):
rpc = self.rpcs.pop(rpc)
else:
del self.rpcs[rpc.uuid]
assert rpc.queue is not None
logger.debug("Chan %s: unbind %s %s %s", ch.channel,cfg['exchanges']['rpc'], rpc.name, rpc.queue['queue'])
await rpc.channel.queue_unbind(rpc.queue['queue'], cfg['exchanges']['rpc'], routing_key=rpc.name)
logger.debug("Chan %s: noread %s", rpc.channel,rpc.queue['queue'])
await rpc.channel.basic_cancel(consumer_tag=rpc.uuid)
async def register_alert(self,rpc):
ch = self.alert
cfg = self.unit().config['amqp']
logger.debug("Chan %s: bind %s %s %s", ch.channel,cfg['exchanges']['alert'], rpc.name, ch.exchange)
await ch.channel.queue_bind(ch.queue['queue'], ch.exchange, routing_key=rpc.name)
self.alerts[rpc.name] = rpc
async def unregister_alert(self,rpc):
if isinstance(rpc,str):
rpc = self.alerts.pop(rpc)
else:
del self.alerts[rpc.name]
ch = self.alert
cfg = self.unit().config['amqp']
logger.debug("Chan %s: unbind %s %s %s", ch.channel,cfg['exchanges']['alert'], rpc.name, ch.exchange)
await ch.channel.queue_unbind(ch.queue['queue'], ch.exchange, routing_key=rpc.name)
async def close(self):
a,self.amqp = self.amqp,None
if a is not None:
try:
await a.close(timeout=1)
except Exception: # pragma: no cover
logger.exception("closing the connection")
self.amqp_transport = None
def _kill(self):
self.amqp = None
a,self.amqp_transport = self.amqp_transport,None
if a is not None:
try:
a.close()
except Exception: # pragma: no cover
logger.exception("killing the connection")
| smurfix/DaBroker | dabroker/unit/conn.py | Python | gpl-3.0 | 10,356 |
import asyncio
import time
import logging
import re
import os
import gc
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.schema import MetaData
import cloudbot
from cloudbot.client import Client
from cloudbot.config import Config
from cloudbot.reloader import PluginReloader
from cloudbot.plugin import PluginManager
from cloudbot.event import Event, CommandEvent, RegexEvent, EventType
from cloudbot.util import botvars, formatting
from cloudbot.clients.irc import IrcClient
logger = logging.getLogger("cloudbot")
def clean_name(n):
"""strip all spaces and capitalization
:type n: str
:rtype: str
"""
return re.sub('[^A-Za-z0-9_]+', '', n.replace(" ", "_"))
class CloudBot:
"""
:type start_time: float
:type running: bool
:type connections: list[Client | IrcClient]
:type data_dir: bytes
:type config: core.config.Config
:type plugin_manager: PluginManager
:type reloader: PluginReloader
:type db_engine: sqlalchemy.engine.Engine
:type db_factory: sqlalchemy.orm.session.sessionmaker
:type db_session: sqlalchemy.orm.scoping.scoped_session
:type db_metadata: sqlalchemy.sql.schema.MetaData
:type loop: asyncio.events.AbstractEventLoop
:type stopped_future: asyncio.Future
:param: stopped_future: Future that will be given a result when the bot has stopped.
"""
def __init__(self, loop=asyncio.get_event_loop()):
# basic variables
self.loop = loop
self.start_time = time.time()
self.running = True
# future which will be called when the bot stops
self.stopped_future = asyncio.Future(loop=self.loop)
# stores each bot server connection
self.connections = []
# for plugins
self.logger = logger
# declare and create data folder
self.data_dir = os.path.abspath('data')
if not os.path.exists(self.data_dir):
logger.debug("Data folder not found, creating.")
os.mkdir(self.data_dir)
# set up config
self.config = Config(self)
logger.debug("Config system initialised.")
# log developer mode
if cloudbot.dev_mode.get("plugin_reloading"):
logger.info("Enabling developer option: plugin reloading.")
if cloudbot.dev_mode.get("config_reloading"):
logger.info("Enabling developer option: config reloading.")
if cloudbot.dev_mode.get("console_debug"):
logger.info("Enabling developer option: console debug.")
if cloudbot.dev_mode.get("file_debug"):
logger.info("Enabling developer option: file debug")
# setup db
db_path = self.config.get('database', 'sqlite:///cloudbot.db')
self.db_engine = create_engine(db_path)
self.db_factory = sessionmaker(bind=self.db_engine)
self.db_session = scoped_session(self.db_factory)
self.db_metadata = MetaData()
# set botvars.metadata so plugins can access when loading
botvars.metadata = self.db_metadata
logger.debug("Database system initialised.")
# Bot initialisation complete
logger.debug("Bot setup completed.")
# create bot connections
self.create_connections()
if cloudbot.dev_mode.get("plugin_reloading"):
self.reloader = PluginReloader(self)
self.plugin_manager = PluginManager(self)
def run(self):
"""
Starts CloudBot.
This will load plugins, connect to IRC, and process input.
:return: True if CloudBot should be restarted, False otherwise
:rtype: bool
"""
# Initializes the bot, plugins and connections
self.loop.run_until_complete(self._init_routine())
# Wait till the bot stops. The stopped_future will be set to True to restart, False otherwise
restart = self.loop.run_until_complete(self.stopped_future)
self.loop.close()
return restart
def create_connections(self):
""" Create a BotConnection for all the networks defined in the config """
for conf in self.config['connections']:
# strip all spaces and capitalization from the connection name
readable_name = conf['name']
name = clean_name(readable_name)
nick = conf['nick']
server = conf['connection']['server']
port = conf['connection'].get('port', 6667)
self.connections.append(IrcClient(self, name, nick, config=conf, channels=conf['channels'],
readable_name=readable_name, server=server, port=port,
use_ssl=conf['connection'].get('ssl', False)))
logger.debug("[{}] Created connection.".format(readable_name))
@asyncio.coroutine
def stop(self, reason=None, *, restart=False):
"""quits all networks and shuts the bot down"""
logger.info("Stopping bot.")
if cloudbot.dev_mode.get("config_reloading"):
logger.debug("Stopping config reloader.")
self.config.stop()
if cloudbot.dev_mode.get("plugin_reloading"):
logger.debug("Stopping plugin reloader.")
self.reloader.stop()
for connection in self.connections:
if not connection.connected:
# Don't quit a connection that hasn't connected
continue
logger.debug("[{}] Closing connection.".format(connection.readable_name))
connection.quit(reason)
yield from asyncio.sleep(1.0) # wait for 'QUIT' calls to take effect
for connection in self.connections:
if not connection.connected:
# Don't close a connection that hasn't connected
continue
connection.close()
self.running = False
# Give the stopped_future a result, so that run() will exit
self.stopped_future.set_result(restart)
@asyncio.coroutine
def restart(self, reason=None):
"""shuts the bot down and restarts it"""
yield from self.stop(reason=reason, restart=True)
@asyncio.coroutine
def _init_routine(self):
# Load plugins
yield from self.plugin_manager.load_all(os.path.abspath("plugins"))
# If we we're stopped while loading plugins, cancel that and just stop
if not self.running:
logger.info("Killed while loading, exiting")
return
if cloudbot.dev_mode.get("plugin_reloading"):
# start plugin reloader
self.reloader.start(os.path.abspath("plugins"))
# Connect to servers
yield from asyncio.gather(*[conn.connect() for conn in self.connections], loop=self.loop)
# Run a manual garbage collection cycle, to clean up any unused objects created during initialization
gc.collect()
@asyncio.coroutine
def process(self, event):
"""
:type event: Event
"""
run_before_tasks = []
tasks = []
command_prefix = event.conn.config.get('command_prefix', '.')
# Raw IRC hook
for raw_hook in self.plugin_manager.catch_all_triggers:
# run catch-all coroutine hooks before all others - TODO: Make this a plugin argument
if not raw_hook.threaded:
run_before_tasks.append(
self.plugin_manager.launch(raw_hook, Event(hook=raw_hook, base_event=event)))
else:
tasks.append(self.plugin_manager.launch(raw_hook, Event(hook=raw_hook, base_event=event)))
if event.irc_command in self.plugin_manager.raw_triggers:
for raw_hook in self.plugin_manager.raw_triggers[event.irc_command]:
tasks.append(self.plugin_manager.launch(raw_hook, Event(hook=raw_hook, base_event=event)))
# Event hooks
if event.type in self.plugin_manager.event_type_hooks:
for event_hook in self.plugin_manager.event_type_hooks[event.type]:
tasks.append(self.plugin_manager.launch(event_hook, Event(hook=event_hook, base_event=event)))
if event.type is EventType.message:
# Commands
if event.chan.lower() == event.nick.lower(): # private message, no command prefix
command_re = r'(?i)^(?:[{}]?|{}[,;:]+\s+)(\w+)(?:$|\s+)(.*)'.format(command_prefix, event.conn.nick)
else:
command_re = r'(?i)^(?:[{}]|{}[,;:]+\s+)(\w+)(?:$|\s+)(.*)'.format(command_prefix, event.conn.nick)
match = re.match(command_re, event.content)
if match:
command = match.group(1).lower()
if command in self.plugin_manager.commands:
command_hook = self.plugin_manager.commands[command]
command_event = CommandEvent(hook=command_hook, text=match.group(2).strip(),
triggered_command=command, base_event=event)
tasks.append(self.plugin_manager.launch(command_hook, command_event))
else:
potential_matches = []
for potential_match, plugin in self.plugin_manager.commands.items():
if potential_match.startswith(command):
potential_matches.append((potential_match, plugin))
if potential_matches:
if len(potential_matches) == 1:
command_hook = potential_matches[0][1]
command_event = CommandEvent(hook=command_hook, text=match.group(2).strip(),
triggered_command=command, base_event=event)
tasks.append(self.plugin_manager.launch(command_hook, command_event))
else:
event.notice("Possible matches: {}".format(
formatting.get_text_list([command for command, plugin in potential_matches])))
# Regex hooks
for regex, regex_hook in self.plugin_manager.regex_hooks:
match = regex.search(event.content)
if match:
regex_event = RegexEvent(hook=regex_hook, match=match, base_event=event)
tasks.append(self.plugin_manager.launch(regex_hook, regex_event))
# Run the tasks
yield from asyncio.gather(*run_before_tasks, loop=self.loop)
yield from asyncio.gather(*tasks, loop=self.loop)
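# --- Illustrative sketch (not part of the original module) -------------------
# The shape of one entry in config['connections'] as read by create_connections()
# and process() above; values are examples only:
#
#   {
#       "name": "EsperNet",
#       "nick": "MyCloudBot",
#       "channels": ["#cloudbot"],
#       "command_prefix": ".",
#       "connection": {
#           "server": "irc.esper.net",
#           "port": 6667,
#           "ssl": false
#       }
#   }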
| Zarthus/CloudBotRefresh | cloudbot/bot.py | Python | gpl-3.0 | 10,636 |
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
'''
Simple web server that demonstrates how browser/server interactions
work for GET and POST requests. Use it as a starting point to create a
custom web server for handling specific requests but don't try to use
it for any production work.
You start by creating a simple index.html file in a web directory
somewhere like your home directory: ~/www.
You then add an HTML file: ~/www/index.html. It can be very
simple. Something like this will do nicely:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>WebServer Test</title>
</head>
<body>
<p>Hello, world!</p>
</body>
</html>
At this point you have a basic web infrastructure with a single file
so you start the server and point to the ~/www root directory:
$ webserver.py -r ~/www
This will start the web server listening on your localhost on port
8080. You can change both the host name and the port using the --host
and --port options. See the on-line help for more information (-h,
--help).
If you do not specify a root directory, it will use the directory that
you started the server from.
Now go to your browser and enter http://0.0.0.0:8080 in the address
bar and you will see your page.
Try entering http://0.0.0.0:8080/info to see some server information.
You can also use http://127.0.0.1.
By default the server allows you to see directory listings if there is
no index.html or index.htm file. You can disable this by specifying
the --no-dirlist option.
If you want to see a directory listing of a directory that contains an
index.html or index.htm file, type three trailing slashes in
the URL like this: http://foo/bar/spam///. This will not work if the
--no-dirlist option is specified.
The default logging level is "info". You can change it using the
"--level" option.
The example below shows how to use a number of the switches to run a
server for host foobar on port 8080 with no directory listing
capability and very little output serving files from ~/www:
$ hostname
foobar
$ webserver --host foobar --port 8080 --level warning --no-dirlist --rootdir ~/www
To daemonize a process, specify the -d or --daemonize option with a
process directory. That directory will contain the log (stdout), err
(stderr) and pid (process id) files for the daemon process. Here is an
example:
$ hostname
foobar
$ webserver --host foobar --port 8080 --level warning --no-dirlist --rootdir ~/www --daemonize ~/www/logs
$ ls ~/www/logs
webserver-foobar-8080.err webserver-foobar-8080.log webserver-foobar-8080.pid
'''
# LICENSE
# Copyright (c) 2015 Joe Linoff
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# VERSIONS
# 1.0 initial release
# 1.1 replace req with self in request handler, add favicon
# 1.2 added directory listings, added --no-dirlist, fixed plain text displays, logging level control, daemonize
VERSION = '1.2'
import argparse
import BaseHTTPServer
import cgi
import logging
import os
import sys
import urlparse
def make_request_handler_class(opts):
'''
Factory to make the request handler and add arguments to it.
It exists to allow the handler to access the parsed command line
options (opts) locally.
'''
class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
'''
Factory generated request handler class that contains
additional class variables.
'''
m_opts = opts
def do_HEAD(self):
'''
Handle a HEAD request.
'''
logging.debug('HEADER %s' % (self.path))
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def info(self):
'''
Display some useful server information.
http://127.0.0.1:8080/info
'''
self.wfile.write('<html>')
self.wfile.write(' <head>')
self.wfile.write(' <title>Server Info</title>')
self.wfile.write(' </head>')
self.wfile.write(' <body>')
self.wfile.write(' <table>')
self.wfile.write(' <tbody>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>client_address</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.client_address)))
self.wfile.write(' </tr>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>command</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.command)))
self.wfile.write(' </tr>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>headers</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.headers)))
self.wfile.write(' </tr>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>path</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.path)))
self.wfile.write(' </tr>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>server_version</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.server_version)))
self.wfile.write(' </tr>')
self.wfile.write(' <tr>')
self.wfile.write(' <td>sys_version</td>')
self.wfile.write(' <td>%r</td>' % (repr(self.sys_version)))
self.wfile.write(' </tr>')
self.wfile.write(' </tbody>')
self.wfile.write(' </table>')
self.wfile.write(' </body>')
self.wfile.write('</html>')
def do_GET(self):
'''
Handle a GET request.
'''
logging.debug('GET %s' % (self.path))
# Parse out the arguments.
# The arguments follow a '?' in the URL. Here is an example:
# http://example.com?arg1=val1
args = {}
idx = self.path.find('?')
if idx >= 0:
rpath = self.path[:idx]
args = urlparse.parse_qs(self.path[idx+1:])
else:
rpath = self.path
# Print out logging information about the path and args.
if 'content-type' in self.headers:
ctype, _ = cgi.parse_header(self.headers['content-type'])
logging.debug('TYPE %s' % (ctype))
logging.debug('PATH %s' % (rpath))
logging.debug('ARGS %d' % (len(args)))
if len(args):
i = 0
for key in sorted(args):
logging.debug('ARG[%d] %s=%s' % (i, key, args[key]))
i += 1
# Check to see whether the file is stored locally,
# if it is, display it.
# There is special handling for http://127.0.0.1/info. That URL
# displays some internal information.
if self.path == '/info' or self.path == '/info/':
self.send_response(200) # OK
self.send_header('Content-type', 'text/html')
self.end_headers()
self.info()
else:
# Get the file path.
path = MyRequestHandler.m_opts.rootdir + rpath
dirpath = None
logging.debug('FILE %s' % (path))
# If it is a directory look for index.html
# or process it directly if there are 3
# trailing slashed.
if rpath[-3:] == '///':
dirpath = path
elif os.path.exists(path) and os.path.isdir(path):
dirpath = path # the directory portion
index_files = ['/index.html', '/index.htm', ]
for index_file in index_files:
tmppath = path + index_file
if os.path.exists(tmppath):
path = tmppath
break
# Allow the user to type "///" at the end to see the
# directory listing.
if os.path.exists(path) and os.path.isfile(path):
# This is valid file, send it as the response
# after determining whether it is a type that
# the server recognizes.
_, ext = os.path.splitext(path)
ext = ext.lower()
content_type = {
'.css': 'text/css',
'.gif': 'image/gif',
'.htm': 'text/html',
'.html': 'text/html',
'.jpeg': 'image/jpeg',
'.jpg': 'image/jpg',
'.js': 'text/javascript',
'.png': 'image/png',
'.text': 'text/plain',
'.txt': 'text/plain',
}
# If it is a known extension, set the correct
# content type in the response.
if ext in content_type:
self.send_response(200) # OK
self.send_header('Content-type', content_type[ext])
self.end_headers()
with open(path) as ifp:
self.wfile.write(ifp.read())
else:
# Unknown file type or a directory.
# Treat it as plain text.
self.send_response(200) # OK
self.send_header('Content-type', 'text/plain')
self.end_headers()
with open(path) as ifp:
self.wfile.write(ifp.read())
else:
if dirpath is None or self.m_opts.no_dirlist == True:
# Invalid file path, respond with a server access error
self.send_response(500) # generic server error for now
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write('<html>')
self.wfile.write(' <head>')
self.wfile.write(' <title>Server Access Error</title>')
self.wfile.write(' </head>')
self.wfile.write(' <body>')
self.wfile.write(' <p>Server access error.</p>')
self.wfile.write(' <p>%r</p>' % (repr(self.path)))
self.wfile.write(' <p><a href="%s">Back</a></p>' % (rpath))
self.wfile.write(' </body>')
self.wfile.write('</html>')
else:
# List the directory contents. Allow simple navigation.
logging.debug('DIR %s' % (dirpath))
self.send_response(200) # OK
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write('<html>')
self.wfile.write(' <head>')
self.wfile.write(' <title>%s</title>' % (dirpath))
self.wfile.write(' </head>')
self.wfile.write(' <body>')
self.wfile.write(' <a href="%s">Home</a><br>' % ('/'));
# Make the directory path navigable.
dirstr = ''
href = None
for seg in rpath.split('/'):
if href is None:
href = seg
else:
href = href + '/' + seg
dirstr += '/'
dirstr += '<a href="%s">%s</a>' % (href, seg)
self.wfile.write(' <p>Directory: %s</p>' % (dirstr))
# Write out the simple directory list (name and size).
self.wfile.write(' <table border="0">')
self.wfile.write(' <tbody>')
fnames = ['..']
fnames.extend(sorted(os.listdir(dirpath), key=str.lower))
for fname in fnames:
self.wfile.write(' <tr>')
self.wfile.write(' <td align="left">')
path = rpath + '/' + fname
fpath = os.path.join(dirpath, fname)
if os.path.isdir(fpath):
self.wfile.write(' <a href="%s">%s/</a>' % (path, fname))
else:
self.wfile.write(' <a href="%s">%s</a>' % (path, fname))
self.wfile.write(' <td> </td>')
self.wfile.write(' </td>')
self.wfile.write(' <td align="right">%d</td>' % (os.path.getsize(fpath)))
self.wfile.write(' </tr>')
self.wfile.write(' </tbody>')
self.wfile.write(' </table>')
self.wfile.write(' </body>')
self.wfile.write('</html>')
def do_POST(self):
'''
Handle POST requests.
'''
logging.debug('POST %s' % (self.path))
# CITATION: http://stackoverflow.com/questions/4233218/python-basehttprequesthandler-post-variables
ctype, pdict = cgi.parse_header(self.headers['content-type'])
if ctype == 'multipart/form-data':
postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers['content-length'])
postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
postvars = {}
# Get the "Back" link.
back = self.path if self.path.find('?') < 0 else self.path[:self.path.find('?')]
# Print out logging information about the path and args.
logging.debug('TYPE %s' % (ctype))
logging.debug('PATH %s' % (self.path))
logging.debug('ARGS %d' % (len(postvars)))
if len(postvars):
i = 0
for key in sorted(postvars):
logging.debug('ARG[%d] %s=%s' % (i, key, postvars[key]))
i += 1
# Tell the browser everything is okay and that there is
# HTML to display.
self.send_response(200) # OK
self.send_header('Content-type', 'text/html')
self.end_headers()
# Display the POST variables.
self.wfile.write('<html>')
self.wfile.write(' <head>')
self.wfile.write(' <title>Server POST Response</title>')
self.wfile.write(' </head>')
self.wfile.write(' <body>')
self.wfile.write(' <p>POST variables (%d).</p>' % (len(postvars)))
if len(postvars):
# Write out the POST variables in 3 columns.
self.wfile.write(' <table>')
self.wfile.write(' <tbody>')
i = 0
for key in sorted(postvars):
i += 1
val = postvars[key]
self.wfile.write(' <tr>')
self.wfile.write(' <td align="right">%d</td>' % (i))
self.wfile.write(' <td align="right">%s</td>' % key)
self.wfile.write(' <td align="left">%s</td>' % val)
self.wfile.write(' </tr>')
self.wfile.write(' </tbody>')
self.wfile.write(' </table>')
self.wfile.write(' <p><a href="%s">Back</a></p>' % (back))
self.wfile.write(' </body>')
self.wfile.write('</html>')
return MyRequestHandler
def err(msg):
'''
Report an error message and exit.
'''
print('ERROR: %s' % (msg))
sys.exit(1)
def getopts():
'''
Get the command line options.
'''
# Get the help from the module documentation.
this = os.path.basename(sys.argv[0])
description = ('description:%s' % '\n '.join(__doc__.split('\n')))
epilog = ' '
rawd = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(formatter_class=rawd,
description=description,
epilog=epilog)
parser.add_argument('-d', '--daemonize',
action='store',
type=str,
default='.',
metavar='DIR',
help='daemonize this process, store the 3 run files (.log, .err, .pid) in DIR (default "%(default)s")')
parser.add_argument('-H', '--host',
action='store',
type=str,
default='localhost',
help='hostname, default=%(default)s')
parser.add_argument('-l', '--level',
action='store',
type=str,
default='info',
choices=['notset', 'debug', 'info', 'warning', 'error', 'critical',],
help='define the logging level, the default is %(default)s')
parser.add_argument('--no-dirlist',
action='store_true',
help='disable directory listings')
parser.add_argument('-p', '--port',
action='store',
type=int,
default=8080,
help='port, default=%(default)s')
parser.add_argument('-r', '--rootdir',
action='store',
type=str,
default=os.path.abspath('.'),
                        help='web directory root that contains the HTML/CSS/JS files (default "%(default)s")')
parser.add_argument('-v', '--verbose',
action='count',
help='level of verbosity')
parser.add_argument('-V', '--version',
action='version',
version='%(prog)s - v' + VERSION)
opts = parser.parse_args()
opts.rootdir = os.path.abspath(opts.rootdir)
if not os.path.isdir(opts.rootdir):
err('Root directory does not exist: ' + opts.rootdir)
if opts.port < 1 or opts.port > 65535:
err('Port is out of range [1..65535]: %d' % (opts.port))
return opts
def httpd(opts):
'''
HTTP server
'''
RequestHandlerClass = make_request_handler_class(opts)
server = BaseHTTPServer.HTTPServer((opts.host, opts.port), RequestHandlerClass)
logging.info('Server starting %s:%s (level=%s)' % (opts.host, opts.port, opts.level))
try:
server.serve_forever()
except KeyboardInterrupt:
pass
server.server_close()
logging.info('Server stopping %s:%s' % (opts.host, opts.port))
def get_logging_level(opts):
'''
Get the logging levels specified on the command line.
The level can only be set once.
'''
if opts.level == 'notset':
return logging.NOTSET
elif opts.level == 'debug':
return logging.DEBUG
elif opts.level == 'info':
return logging.INFO
elif opts.level == 'warning':
return logging.WARNING
elif opts.level == 'error':
return logging.ERROR
elif opts.level == 'critical':
return logging.CRITICAL
def daemonize(opts):
'''
Daemonize this process.
CITATION: http://stackoverflow.com/questions/115974/what-would-be-the-simplest-way-to-daemonize-a-python-script-in-linux
'''
if os.path.exists(opts.daemonize) is False:
err('directory does not exist: ' + opts.daemonize)
if os.path.isdir(opts.daemonize) is False:
err('not a directory: ' + opts.daemonize)
bname = 'webserver-%s-%d' % (opts.host, opts.port)
outfile = os.path.abspath(os.path.join(opts.daemonize, bname + '.log'))
errfile = os.path.abspath(os.path.join(opts.daemonize, bname + '.err'))
pidfile = os.path.abspath(os.path.join(opts.daemonize, bname + '.pid'))
if os.path.exists(pidfile):
err('pid file exists, cannot continue: ' + pidfile)
if os.path.exists(outfile):
os.unlink(outfile)
if os.path.exists(errfile):
os.unlink(errfile)
if os.fork():
sys.exit(0) # exit the parent
os.umask(0)
os.setsid()
if os.fork():
sys.exit(0) # exit the parent
print('daemon pid %d' % (os.getpid()))
sys.stdout.flush()
sys.stderr.flush()
stdin = file('/dev/null', 'r')
stdout = file(outfile, 'a+')
stderr = file(errfile, 'a+', 0)
os.dup2(stdin.fileno(), sys.stdin.fileno())
os.dup2(stdout.fileno(), sys.stdout.fileno())
os.dup2(stderr.fileno(), sys.stderr.fileno())
with open(pidfile, 'w') as ofp:
ofp.write('%i' % (os.getpid()))
def main():
''' main entry '''
opts = getopts()
if opts.daemonize:
daemonize(opts)
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', level=get_logging_level(opts))
httpd(opts)
if __name__ == '__main__':
main() # this allows library functionality
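
# Example invocation (illustrative):
#   python webserver.py -H localhost -p 8080 -r /var/www --no-dirlist
# This serves /var/www on localhost:8080; because -d/--daemonize defaults to '.',
# the process daemonizes and writes webserver-<host>-<port>.log/.err/.pid files there.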
| CohibAA/simple-python-web-server | webserver.py | Python | mit | 22,978 |
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import int_or_none
class TOnlineIE(InfoExtractor):
IE_NAME = 't-online.de'
_VALID_URL = r'https?://(?:www\.)?t-online\.de/tv/(?:[^/]+/)*id_(?P<id>\d+)'
_TEST = {
'url': 'http://www.t-online.de/tv/sport/fussball/id_79166266/drittes-remis-zidane-es-muss-etwas-passieren-.html',
'md5': '7d94dbdde5f9d77c5accc73c39632c29',
'info_dict': {
'id': '79166266',
'ext': 'mp4',
'title': 'Drittes Remis! Zidane: "Es muss etwas passieren"',
'description': 'Es läuft nicht rund bei Real Madrid. Das 1:1 gegen den SD Eibar war das dritte Unentschieden in Folge in der Liga.',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
video_data = self._download_json(
'http://www.t-online.de/tv/id_%s/tid_json_video' % video_id, video_id)
title = video_data['subtitle']
formats = []
for asset in video_data.get('assets', []):
asset_source = asset.get('source') or asset.get('source2')
if not asset_source:
continue
formats_id = []
for field_key in ('type', 'profile'):
field_value = asset.get(field_key)
if field_value:
formats_id.append(field_value)
formats.append({
'format_id': '-'.join(formats_id),
'url': asset_source,
})
thumbnails = []
for image in video_data.get('images', []):
image_source = image.get('source')
if not image_source:
continue
thumbnails.append({
'url': image_source,
})
return {
'id': video_id,
'title': title,
'description': video_data.get('description'),
'duration': int_or_none(video_data.get('duration')),
'thumbnails': thumbnails,
'formats': formats,
}
| valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/tonline.py | Python | gpl-3.0 | 1,712 |
"""
Copyright (c) 2015 Andreea Georgescu
Created on Sun Mar 1 19:51:42 2015
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from experiment import *
from experiment_HaloIndep_er import *
import parallel_map as par
from scipy.linalg import det, inv
# from scipy.special import lambertw
from lambertw import *
from scipy.optimize import brentq
import os
class Experiment_HaloIndep(Experiment):
""" Base class that implements the halo-independent analysis common to all
experiments, using vmin as independent variable in the integration.
Input:
exper_name: string
Name of experiment.
scattering_type: string
Type of scattering. Can be
- 'SI' (spin-independent)
- 'SDAV' (spin-independent, axial-vector)
- 'SDPS' (spin-independent, pseudo-scalar)
mPhi: float, optional
The mass of the mediator. If not given, it corresponds to contact interaction.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef):
Experiment.__init__(self, exper_name, scattering_type, mPhi)
if self.energy_resolution_type == "Dirac":
self.Response = self._Response_Dirac
else:
self.Response = self._Response_Finite
def DifferentialResponse(self, Eee, qER, const_factor):
""" Differential response function d**2 R / (d Eee d ER)
NOT including the velocity integral eta0
Input:
Eee: float or ndarray
Measured energy (electron equivalent).
qER: float or ndarray
q * ER for quenching factor q and recoil energy ER.
const_factor: ndarray
Factors entering the differential response that do not depend on Eee.
"""
self.count_diffresponse_calls += 1
r_list = const_factor * self.Efficiency(Eee) * \
np.array([self.ResolutionFunction(Eee, qer, self.EnergyResolution(qer))
for qer in qER])
return r_list.sum()
def ConstFactor(self, vmin, mx, fp, fn, delta, sign):
""" Collects the factors that don't depend on the measured energy Eee,
so they only need to be computed once in Response function.
Returns:
(ER, qER, const_factor): tuple
"""
ER = ERecoilBranch(vmin, self.mT, mx, delta, sign)
q = self.QuenchingFactor(ER)
qER = q * ER
efficiencyER = self.Efficiency_ER(ER)
const_factor = kilogram/SpeedOfLight**2 * \
self.CrossSectionFactors(ER, mx, fp, fn, delta) * \
np.abs(dERecoildVmin(vmin, self.mT, mx, delta, sign)) * efficiencyER
return (ER, qER, const_factor)
def DifferentialResponse_Full(self, vmin, Eee, mx, fp, fn, delta, sign):
""" Differential response function d**2 R / (d Eee d ER)
NOT including the velocity integral eta0
Same as DifferentialResponse, but computed given full input parameters,
instead of the pre-computed const_factor.
"""
(ER, qER, const_factor) = self.ConstFactor(vmin, mx, fp, fn, delta, sign)
return self.DifferentialResponse(Eee, qER, const_factor)
def _Response_Finite(self, vmin, Eee1, Eee2, mx, fp, fn, delta):
""" Response function integral d**2 R / (d Eee d ER) between measured energies
Eee1 and Eee2.
NOT including eta0.
For any finite resolution function (i.e. other than Dirac Delta).
"""
self.count_response_calls += 1
if delta == 0:
branches = [1]
else:
branches = [1, -1]
result = 0
for sign in branches:
(ER, qER, const_factor) = self.ConstFactor(vmin, mx, fp, fn, delta, sign)
result += integrate.quad(self.DifferentialResponse, Eee1, Eee2,
args=(qER, const_factor),
epsrel=PRECISSION, epsabs=0)[0]
if result >= 0:
return result
return 0
def _Response_Dirac(self, vmin, Eee1, Eee2, mx, fp, fn, delta):
""" Response function integral d**2 R / (d Eee d ER) between measured energies
Eee1 and Eee2.
NOT including eta0.
For Dirac Delta resolution function.
"""
self.count_response_calls += 1
if delta == 0:
branches = [1]
else:
branches = [1, -1]
r_list_sum = 0
for sign in branches:
ER = ERecoilBranch(vmin, self.mT, mx, delta, sign)
q = self.QuenchingFactor(ER)
qER = q * ER
integrated_delta = np.array([1. if Eee1 <= i < Eee2 else 0. for i in qER])
efficiencyEee = self.Efficiency(Eee1, qER)
# efficiencyER = self.Efficiency_ER(qER)
efficiencyER = np.array(list(map(self.Efficiency_ER, qER)))
r_list = kilogram/SpeedOfLight**2 * \
self.CrossSectionFactors(ER, mx, fp, fn, delta) * \
np.abs(dERecoildVmin(vmin, self.mT, mx, delta, sign)) * \
efficiencyEee * efficiencyER * integrated_delta
r_list_sum += r_list.sum()
return r_list_sum
def IntegratedResponse(self, vmin1, vmin2, Eee1, Eee2, mx, fp, fn, delta):
""" Integrated Response Function between measured energies Eee1 and Eee2,
and all recoil energies ER.
NOT including eta0.
For any finite resolution function (i.e. other than Dirac Delta).
"""
midpoints = []
integr = integrate.quad(self.Response, vmin1, vmin2,
args=(Eee1, Eee2, mx, fp, fn, delta), points=midpoints,
epsrel=PRECISSION, epsabs=0)
return integr[0]
class MaxGapExperiment_HaloIndep(Experiment_HaloIndep):
""" Class for experiments using the Maximum Gap Method.
Input:
exper_name: string
Name of experiment.
scattering_type: string
Type of scattering.
mPhi: float, optional
Mass of the mediator.
quenching_factor: float, optional
Quenching factor. If not given, the default used is specified in the data
modules.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef, quenching_factor=None):
super().__init__(exper_name, scattering_type, mPhi)
module = import_file(INPUT_DIR + exper_name + ".py")
self.ERecoilList = module.ERecoilList
self.ElistMaxGap = np.append(np.insert(
np.array(list(filter(lambda x: self.Ethreshold < x < self.Emaximum,
self.ERecoilList))),
0, self.Ethreshold), self.Emaximum)
def TabulateMaximumGapLimit(self, vmin1, vmin2, mx, fp, fn, delta):
print("vmin = ", vmin2)
return np.array(list(map(lambda i, j:
self.IntegratedResponse(vmin1, vmin2, i, j, mx, fp, fn, delta),
self.ElistMaxGap[:-1], self.ElistMaxGap[1:])))
def MaximumGapUpperBound(self, vmin_min, vmin_max, vmin_step, mx, fp, fn, delta,
output_file, processes=None):
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
vmin_list0 = np.insert(vmin_list, 0, 0.)
xtable = np.zeros(self.ElistMaxGap.size - 1)
upperlimit_table = np.array([])
kwargs = ({'vmin1': vmin_list0[v_index], 'vmin2': vmin_list[v_index],
'mx': mx, 'fp': fp, 'fn': fn, 'delta': delta}
for v_index in range(vmin_list.size))
xtable_list = par.parmap(self.TabulateMaximumGapLimit, kwargs, processes)
for v_index in range(vmin_list.size):
xtable += xtable_list[v_index]
mu_scaled = xtable.sum()
x_scaled = np.max(xtable)
if x_scaled == 0:
mu_over_x = np.inf
result = [np.inf]
else:
mu_over_x = mu_scaled / x_scaled
y_guess = np.real(-lambertw(-0.1 / mu_over_x, -1))
y = fsolve(lambda x: MaximumGapC0scaled(x, mu_over_x) - ConfidenceLevel,
y_guess)
result = y / x_scaled / self.Exposure
result = result[0]
print("vmin = ", vmin_list[v_index], " mu_over_x = ", mu_over_x)
print("xtable = ", xtable)
print("mu_over_x =", mu_over_x)
print("y_guess =", y_guess)
print("y =", y)
print("x_scaled =", x_scaled)
print("result = ", result)
to_print = np.log10(np.array([[mx, result]]))
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, to_print)
upperlimit_table = np.append(upperlimit_table, [result])
return upperlimit_table
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, processes=None, **unused_kwargs):
upper_limit = self.MaximumGapUpperBound(vmin_min, vmin_max, vmin_step, mx,
fp, fn, delta, output_file,
processes=processes)
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
print("vmin_list = ", vmin_list)
print("upper_limit = ", upper_limit)
result = np.transpose([vmin_list, np.log10(upper_limit)])
print("res = ", result)
return result[result[:, 1] != np.inf]
class PoissonExperiment_HaloIndep(Experiment_HaloIndep):
""" Class for experiments with Poisson analysis.
Input:
exper_name: string
Name of experiment.
scattering_type: string
Type of scattering.
mPhi: float, optional
Mass of the mediator.
quenching_factor: float, optional
Quenching factor. If not given, the default used is specified in the data
modules.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef, quenching_factor=None):
super().__init__(exper_name, scattering_type, mPhi)
module = import_file(INPUT_DIR + exper_name + ".py")
self.Expected_limit = module.Expected_limit
def _PoissonUpperBound(self, vmin, mx, fp, fn, delta):
print('vmin =', vmin)
muT = self.mT * mx / (self.mT + mx)
Eee_max = max(2e6 * muT**2 * (vmin/SpeedOfLight)**2 / self.mT)
print("self.Ethreshold =", self.Ethreshold)
print("Eee_max =", Eee_max)
int_response = self.IntegratedResponse(0, vmin, self.Ethreshold, Eee_max,
mx, fp, fn, delta)
print("int_response =", int_response)
if int_response > 0:
result = np.log10(self.Expected_limit / self.Exposure / int_response)
else:
result = np.inf
return [vmin, result]
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, processes=None, **unused_kwargs):
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
kwargs = ({'vmin': vmin, 'mx': mx, 'fp': fp, 'fn': fn, 'delta': delta}
for vmin in vmin_list)
upper_limit = np.array(par.parmap(self._PoissonUpperBound, kwargs, processes))
upper_limit = upper_limit[upper_limit[:, 1] != np.inf]
print("upper_limit = ", upper_limit)
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, upper_limit)
return upper_limit
class GaussianExperiment_HaloIndep(Experiment_HaloIndep):
""" Class for experiments with Gaussian analysis.
Input:
exper_name: string
Name of experiment.
scattering_type: string
Type of scattering.
mPhi: float, optional
Mass of the mediator.
quenching_factor: float, optional
Quenching factor. If not given, the default used is specified in the data
modules.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef, quenching_factor=None):
super().__init__(exper_name, scattering_type, mPhi)
module = import_file(INPUT_DIR + exper_name + ".py")
self.BinEdges_left = module.BinEdges_left
self.BinEdges_right = module.BinEdges_right
self.BinData = module.BinData
self.BinError = module.BinError
self.BinSize = module.BinSize
self.chiSquared = chi_squared(self.BinData.size)
self.Expected_limit = module.Expected_limit * self.BinSize
if quenching_factor is not None:
self.QuenchingFactor = lambda e: quenching_factor
print('BinData', self.BinData)
print('BinError', self.BinError)
def _GaussianUpperBound(self, vmin, mx, fp, fn, delta):
int_response = \
np.array(list(map(lambda i, j:
self.IntegratedResponse(0, vmin, i, j, mx, fp, fn, delta),
self.BinEdges_left, self.BinEdges_right)))
result = [i for i in self.Expected_limit / int_response if i > 0]
result = np.min(result)
if result > 0:
result = np.log10(result)
else:
result = np.inf
print("(vmin, result) =", (vmin, result))
return [vmin, result]
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, processes=None, **unused_kwargs):
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
kwargs = ({'vmin': vmin, 'mx': mx, 'fp': fp, 'fn': fn, 'delta': delta}
for vmin in vmin_list)
upper_limit = np.array(par.parmap(self._GaussianUpperBound, kwargs, processes))
upper_limit = upper_limit[upper_limit[:, 1] != np.inf]
print("upper_limit = ", upper_limit)
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, upper_limit)
return upper_limit
class Crosses_HaloIndep(Experiment_HaloIndep):
""" Class for finding the crosses for experients with potential signal and
binned data.
Input:
exper_name: string
Name of experiment.
scattering_type: string
Type of scattering.
mPhi: float, optional
Mass of the mediator.
quenching_factor: float, optional
Quenching factor. If not given, the default used is specified in the data
modules.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef, quenching_factor=None):
super().__init__(exper_name, scattering_type, mPhi)
module = import_file(INPUT_DIR + exper_name + ".py")
self.BinEdges = module.BinEdges
self.BinEdges_left = self.BinEdges[:-1]
self.BinEdges_right = self.BinEdges[1:]
self.BinData = module.BinData
self.BinError = module.BinError
self.QuenchingFactorOfEee = module.QuenchingFactorOfEee
if quenching_factor is not None:
self.QuenchingFactor = lambda e: quenching_factor
self._int_resp = self.IntegratedResponse
def _VminRange(self, E1, E2, mT, mx, delta):
E_delta = - delta * mx / (mT + mx)
vmin_of_E1 = VMin(E1, mT, mx, delta)
vmin_of_E2 = VMin(E2, mT, mx, delta)
print(vmin_of_E1, vmin_of_E2)
if E1 <= E_delta and E2 >= E_delta:
vmin_min = 0
else:
vmin_min = min(vmin_of_E1, vmin_of_E2)
vmin_max = max(vmin_of_E1, vmin_of_E2)
return (vmin_min, vmin_max)
def _AverageOverNuclides(self, quantity):
return np.sum(quantity * self.mass_fraction) / np.sum(self.mass_fraction)
def _Box(self, Eee1, Eee2, mT_avg, mx, fp, fn, delta, vmax, output_file=None):
print(self.name)
print('Eee1 =', Eee1, ' Eee2 =', Eee2)
dvmin = 1
if delta <= 0:
vmin_list = np.linspace(0, vmax, (vmax + dvmin)/dvmin)
resp_list = [self.Response(vmin1, Eee1, Eee2, mx, fp, fn, delta)
for vmin1 in vmin_list[:-1]] + [0.001]
else:
vdelta = min(VminDelta(self.mT, mx, delta))
print('vdelta =', vdelta)
vdelta = max(0, vdelta // dvmin * dvmin - dvmin)
vmin_list = np.linspace(vdelta, vmax, (vmax - vdelta + dvmin)/dvmin)
resp_list = [self.IntegratedResponse(vmin1, vmin2, Eee1, Eee2,
mx, fp, fn, delta)/dvmin
for vmin1, vmin2 in zip(vmin_list[:-1], vmin_list[1:])] + [0.001]
plt.close()
plt.plot(vmin_list, resp_list, '-')
int_resp = sum(resp_list) * dvmin
index_center = np.argmax(resp_list)
vmin_center = vmin_list[index_center]
resp_max = resp_list[index_center]
resp_min = max(resp_list[0], resp_list[-1])
if output_file is not None:
output_file = output_file.replace('temp.dat', self.name + '_' + str(Eee1) +
'_' + str(Eee2) + '.dat')
print(output_file)
np.savetxt(output_file,
np.transpose([vmin_list, np.array(resp_list)/int_resp]))
output_file = output_file.replace('.dat', '_notnorm.dat')
print(output_file)
np.savetxt(output_file, np.transpose([vmin_list, resp_list]))
if index_center > 0:
int_resp_left = \
interpolate.interp1d(resp_list[index_center::-1],
dvmin * np.cumsum(resp_list[index_center::-1]))
else:
def int_resp_left(r): return 0
if index_center < len(resp_list) - 1:
int_resp_right = \
interpolate.interp1d(resp_list[index_center:],
dvmin * np.cumsum(resp_list[index_center:]))
else:
def int_resp_right(r): return 0
print('resp_max =', resp_max)
print('resp_min =', resp_min)
print('int_resp =', int_resp)
def integrated_response(r):
return int_resp_left(r) + int_resp_right(r) - resp_max -\
ConfidenceLevel * int_resp
print(integrated_response(resp_min * 1.1), integrated_response(resp_max * 0.9))
response_CL = brentq(integrated_response, resp_min * 1.1, resp_max * 0.9)
print('response_CL =', response_CL)
plt.plot(vmin_list, response_CL * np.ones_like(vmin_list), '-')
vmin_interp_left = interpolate.interp1d(resp_list[:index_center + 1],
vmin_list[:index_center + 1])
vmin_interp_right = interpolate.interp1d(resp_list[index_center:],
vmin_list[index_center:])
vmin_error_left = - vmin_interp_left(response_CL) + vmin_center
vmin_error_right = vmin_interp_right(response_CL) - vmin_center
print('vmin_edges =',
VMin(Eee1/self.QuenchingFactor(Eee1), self.mT, mx, delta)[0],
VMin(Eee2/self.QuenchingFactor(Eee2), self.mT, mx, delta)[0])
print('vmin_interp =', vmin_interp_left(response_CL),
vmin_interp_right(response_CL))
print('vmin_center =', vmin_center)
print('vmin_error =', vmin_error_left, vmin_error_right)
# os.system("say 'Plot'")
# plt.show()
return (int_resp, vmin_center, vmin_error_left, vmin_error_right)
def _Boxes(self, mx, fp, fn, delta, vmax=2000, processes=None, output_file=None):
mT_avg = np.sum(self.mT * self.mass_fraction) / np.sum(self.mass_fraction)
print("mT_avg =", mT_avg)
print('vmax =', vmax)
kwargs = ({'Eee1': Eee1, 'Eee2': Eee2, 'mT_avg': mT_avg,
'mx': mx, 'fp': fp, 'fn': fn, 'delta': delta, 'vmax': vmax,
'output_file': output_file}
for Eee1, Eee2 in zip(self.BinEdges_left, self.BinEdges_right))
return np.array(par.parmap(self._Box, kwargs, processes))
def _Rebin(self, index=9):
self.BinEdges = np.append(self.BinEdges[:index + 1], self.BinEdges[-1])
data, error = Rebin_data(self.BinData[index:], self.BinError[index:])
self.BinData = np.append(self.BinData[:index], data)
self.BinError = np.append(self.BinError[:index], error)
print('BinEdges =', self.BinEdges)
print('BinData =', self.BinData)
print('BinError =', self.BinError)
self.BinEdges_left = self.BinEdges[:-1]
self.BinEdges_right = self.BinEdges[1:]
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, rebin=False, processes=None, **unused_kwargs):
if rebin:
self._Rebin()
box_table = self._Boxes(mx, fp, fn, delta, vmax=vmin_max, processes=processes)
int_resp_list = box_table[:, 0]
vmin_center_list = box_table[:, 1]
vmin_error_left_list = box_table[:, 2]
vmin_error_right_list = box_table[:, 3]
eta_list = self.BinData / int_resp_list
eta_error_list = self.BinError / int_resp_list
print('Bin Data', self.BinData)
print('Bin Error', self.BinError)
print('eta error', eta_list)
print('eta error list', eta_error_list)
result = np.array([int_resp_list, vmin_center_list, vmin_error_left_list,
vmin_error_right_list, eta_list, eta_error_list])
print(result)
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, result)
return result
def IntResponseMatrix(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, processes=None):
np.set_printoptions(threshold=np.nan)
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
kwargs = ({'vmin1': vmin1, 'vmin2': vmin2,
'Eee1': Eee1, 'Eee2': Eee2,
'mx': mx, 'fp': fp, 'fn': fn, 'delta': delta}
for vmin1, vmin2 in zip(vmin_list[:-1], vmin_list[1:])
for Eee1, Eee2 in zip(self.BinEdges_left, self.BinEdges_right))
matr = [self._int_resp(**k) for k in kwargs]
matr = np.reshape(matr, (len(vmin_list)-1, len(self.BinEdges_left)))
print('matrix =')
print(matr)
print(np.shape(matr))
print('determinant =', det(matr))
print('inverse =')
print(inv(matr))
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, matr)
return matr
class Crosses_HaloIndep_Combined(Crosses_HaloIndep, Experiment_HaloIndep):
""" Class for finding the best-fit regions for the DAMA experiment
when considering the combined analysis of Na and I.
Constructor:
A list or tuple of 2 experiment names must be given, and, if not None, then
a list or tuple of 2 quenching_factors, one for Na and one for I.
"""
def __init__(self, exper_name, scattering_type, mPhi=mPhiRef, quenching_factor=None):
exper_name = exper_name.split()
super().__init__(exper_name[0], scattering_type, mPhi)
self.other = self.__class__.__bases__[0](exper_name[1], scattering_type, mPhi)
if quenching_factor is not None:
self.QuenchingFactor = lambda e: quenching_factor[0]
self.other.QuenchingFactor = lambda e: quenching_factor[1]
def _int_resp(self, vmin1, vmin2, Eee1, Eee2, mx, fp, fn, delta):
return self.IntegratedResponse(vmin1, vmin2, Eee1, Eee2, mx, fp, fn, delta) \
+ self.other.IntegratedResponse(vmin1, vmin2, Eee1, Eee2, mx, fp, fn, delta)
def _Rebin(self, initial_energy_bin, vmax, mx, num_rebinned_bins=19):
# build the new self.BinEdges_left and self.BinEdges_right
self.BinEdges_left = [initial_energy_bin[0]]
self.BinEdges_right = [initial_energy_bin[1]]
ratio = ERecoil_ratio(self.mT, self.other.mT, mx,
self.QuenchingFactor(0), self.other.QuenchingFactor(0))
ratio = round(ratio[0], 1)
print('ratio =', ratio)
vmin_left_edge = VMin(self.BinEdges_left[-1]/self.QuenchingFactor(0),
self.mT[0], mx, 0)
print('vmax =', vmax)
print('vmin_left_edge =', vmin_left_edge)
while vmin_left_edge < vmax:
self.BinEdges_left.append(self.BinEdges_left[-1] * ratio)
self.BinEdges_right.append(self.BinEdges_right[-1] * ratio)
vmin_left_edge = VMin(self.BinEdges_left[-1]/self.QuenchingFactor(0),
self.mT[0], mx, 0)
print('vmin_left_edge =', vmin_left_edge)
self.other.BinEdges_left = self.BinEdges_left
self.other.BinEdges_right = self.BinEdges_right
print('BinEdges_left =', self.BinEdges_left)
print('BinEdges_right =', self.BinEdges_right)
if self.BinEdges_right[-1] > self.BinEdges[-1]:
# add fake bins at higher energies
index = len(self.BinData) - num_rebinned_bins
data, error = Rebin_data(self.BinData[index:], self.BinError[index:])
num_added_bins = round((self.BinEdges_right[-1] - self.BinEdges[-1]) /
(self.BinEdges[-1] - self.BinEdges[-2]))
added_edges = np.linspace(self.BinEdges[-1], self.BinEdges_right[-1],
num_added_bins + 1)
self.BinEdges = np.append(self.BinEdges, added_edges)
self.BinData = np.append(self.BinData,
[data/num_rebinned_bins] * num_added_bins)
self.BinError = np.append(self.BinError,
[error/np.sqrt(num_rebinned_bins)] * num_added_bins)
print('BinEdges =', self.BinEdges)
print('BinData =', self.BinData)
print('BinError =', self.BinError)
# combine multiple bins to fit the edges from self.BinEdges_left and _right
self.BinData_rebinned = []
self.BinError_rebinned = []
for index in range(len(self.BinEdges_left)):
data = np.array([d for i, d in enumerate(self.BinData)
if self.BinEdges[i] >= self.BinEdges_left[index] and
self.BinEdges[i + 1] <= self.BinEdges_right[index]])
error = np.array([d for i, d in enumerate(self.BinError)
if self.BinEdges[i] >= self.BinEdges_left[index] and
self.BinEdges[i + 1] <= self.BinEdges_right[index]])
print('data =', data)
print('error =', error)
data_rebinned, error_rebinned = Rebin_data(data, error)
self.BinData_rebinned.append(data_rebinned)
self.BinError_rebinned.append(error_rebinned)
print('BinData_rebinned =', self.BinData_rebinned)
print('BinError_rebinned =', self.BinError_rebinned)
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, initial_energy_bin=[2, 4], vmax=None, processes=None,
**unused_kwargs):
if delta != 0:
raise ValueError('delta has to be zero for DAMA halo-independent ' +
'combined analysis!')
if vmax is None:
vmax = vmin_step
self._Rebin(initial_energy_bin, vmax, mx)
box_table = self._Boxes(mx, fp, fn, delta, vmax=vmin_max, processes=processes,
output_file=output_file)
box_table_other = self.other._Boxes(mx, fp, fn, delta, vmax=vmin_max,
processes=processes, output_file=output_file)
print('box_table =')
print(repr(box_table))
print('box_table_other =')
print(repr(box_table_other))
int_resp_list = box_table[:, 0]
int_resp_list_other = box_table_other[:, 0]
vmin_center_list = box_table_other[:, 1]
vmin_error_left_list = box_table_other[:, 2]
vmin_error_right_list = box_table_other[:, 3]
size = len(int_resp_list)
int_resp_matrix = np.vstack((np.hstack((np.zeros((size - 1, 1)),
np.diag(int_resp_list[:-1]))),
np.zeros(size)))
int_resp_matrix += np.diag(int_resp_list_other)
print('int_resp_matrix =', int_resp_matrix)
int_resp_inverse = np.linalg.inv(int_resp_matrix)
eta_list = np.dot(int_resp_inverse, self.BinData_rebinned)
eta_error_list = np.sqrt(np.dot(int_resp_inverse ** 2,
np.array(self.BinError_rebinned) ** 2))
result = np.array([int_resp_list + int_resp_list_other,
vmin_center_list, vmin_error_left_list, vmin_error_right_list,
eta_list, eta_error_list])
print(result)
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, result)
return result
class Standard_Halo_Model:
def __init__(self, exper_name, log_sigma_p):
self.name = exper_name
self.log_sigma_p = log_sigma_p
def UpperLimit(self, mx, fp, fn, delta, vmin_min, vmin_max, vmin_step,
output_file, **unused_kwargs):
if "eta0" in self.name:
eta = eta0Maxwellian
else:
eta = eta1Maxwellian
vmin_list = np.linspace(vmin_min, vmin_max, (vmin_max - vmin_min)/vmin_step + 1)
# print('vmin_list =', vmin_list)
eta_list = eta(vmin_list, vobs, v0bar, vesc)
eta_list = np.array([i if i > 0 else np.inf for i in eta_list])
# print('eta_list =', eta_list)
log_eta_list = self.log_sigma_p + np.log10(conversion_factor / mx * eta_list)
# print('log_eta_list =', log_eta_list)
result = np.transpose([vmin_list, log_eta_list])
result = result[result[:, 1] != np.inf]
print(result)
with open(output_file, 'ab') as f_handle:
np.savetxt(f_handle, result)
return result
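
# Illustrative usage (hypothetical experiment name and parameter values; the
# Experiment base class, the INPUT_DIR data modules and constants such as mPhiRef
# come from the modules imported above):
#   exper = Experiment_HaloIndep('SuperCDMS', 'SI')
#   resp = exper.IntegratedResponse(vmin1=100, vmin2=200, Eee1=2, Eee2=4,
#                                   mx=9., fp=1., fn=1., delta=0.)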
| Andreea-G/Codds_DarkMatter | src/experiment_HaloIndep.py | Python | gpl-2.0 | 31,067 |
from __future__ import division
from __future__ import generators | Microsoft/PTVS | Python/Tests/TestData/Grammar/FromFuture24.py | Python | apache-2.0 | 65 |
# HEX = '{:x}'.format
#
#
# class Converter(object):
# @staticmethod
# def to_ascii(s):
# return ''.join(chr(int(s[a:a + 2], 16)) for a in xrange(0, len(s), 2))
#
# @staticmethod
# def to_hex(s):
# return ''.join(HEX(ord(b)) for b in s)
class Converter(object):
@staticmethod
def to_ascii(s):
return s.decode('hex')
@staticmethod
def to_hex(s):
return s.encode('hex')
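
# Illustrative check (assumes Python 2, where str.encode('hex') / str.decode('hex') exist):
if __name__ == '__main__':
    assert Converter.to_hex('ab') == '6162'
    assert Converter.to_ascii('6162') == 'ab'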
| the-zebulan/CodeWars | katas/kyu_5/ascii_hex_converter.py | Python | mit | 435 |
import os
from gears.environment import Environment
from gears.finders import FileSystemFinder
from gears_coffeescript import CoffeeScriptCompiler
ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
ASSETS_DIR = os.path.join(ROOT_DIR, 'assets')
STATIC_DIR = os.path.join(ROOT_DIR, 'static')
env = Environment(STATIC_DIR)
env.finders.register(FileSystemFinder([ASSETS_DIR]))
env.compilers.register('.coffee', CoffeeScriptCompiler.as_handler())
env.register_defaults()
if __name__ == '__main__':
env.save()
| gears/gears-coffeescript | example/compile.py | Python | isc | 518 |
#!/usr/bin/env python
###############################################################################
# #
# fileEntity.py #
# #
# Represent a file / folder with path etc #
# #
# Copyright (C) Michael Imelfort #
# #
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__author__ = "Michael Imelfort"
__copyright__ = "Copyright 2014"
__credits__ = ["Michael Imelfort"]
__license__ = "GPLv3"
__version__ = "0.1.0"
__maintainer__ = "Michael Imelfort"
__email__ = "[email protected]"
__status__ = "Beta"
###############################################################################
###############################################################################
###############################################################################
###############################################################################
# system includes
import sys
import os
# local includes
###############################################################################
###############################################################################
###############################################################################
###############################################################################
class FileEntity(object):
"""Basic file entity"""
def __init__(self,
name, # the name of the entity on the file system ( Full path to root dir if id: ROOT)
path, # the local path to this entity
parent, # the entity (type == 'dir') that contains this. ( None for id: ROOT )
hashd, # hash of the entity if type == 'file'
size # size of the file in bytes
):
self.name = name
        self.path = path
self.parent = parent
self.hashd = hashd
self.size = size
def getFullPath(self):
"""get the full path to this entity"""
if self.parent == None:
return ""
else:
return os.path.join(self.parent.getFullPath(), self.name)
def checkIntegrity(self):
"""Check the file for corruption"""
if self.type == 'dir':
return True
else:
# check the hashd and compare against the recorded MD5
return True
def __str__(self):
if self.parent is not None:
return "\t".join([os.path.join(self.path,self.name),self.hashd,str(self.size)])
return ""
#------------------------------------------------------------------------------
# Handling IDs
###############################################################################
###############################################################################
###############################################################################
###############################################################################
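
# Illustrative usage (hypothetical values):
#   root = FileEntity('/backups/root', '', None, '', 0)
#   child = FileEntity('notes.txt', 'docs', root, 'd41d8cd98f00b204e9800998ecf8427e', 12)
#   child.getFullPath() -> 'notes.txt' (the root parent contributes an empty prefix)
#   str(child) -> 'docs/notes.txt\t<hashd>\t12'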
| minillinim/ScreamingBackpack | screamingbackpack/fileEntity.py | Python | gpl-3.0 | 4,536 |
#! /usr/bin/env python3
"""
http://oj.leetcode.com/problems/binary-tree-level-order-traversal/
Given a binary tree, return the level order traversal of its nodes' values.
(ie, from left to right, level by level).
For example:
Given binary tree {3,9,20,#,#,15,7},
3
/ \
9 20
/ \
15 7
return its level order traversal as:
[
[3],
[9,20],
[15,7]
]
Since Apr-10-2014 18:36
"""
# Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param root, a tree node
# @return a list of lists of integers
def levelOrder(self, root):
res = []
if not root:
return []
else:
cur_level_nodes = [root]
res.append([node.val for node in cur_level_nodes])
while True:
next_level_nodes = []
for node in cur_level_nodes:
if node.left:
next_level_nodes.append(node.left)
if node.right:
next_level_nodes.append(node.right)
if not next_level_nodes:
break
res.append([node.val for node in next_level_nodes])
cur_level_nodes = next_level_nodes
return res
if __name__ == '__main__':
s = Solution()
n1 = TreeNode(1)
n2 = TreeNode(2)
n3 = TreeNode(3)
n4 = TreeNode(4)
n5 = TreeNode(5)
n1.left = n2
n1.right = n3
n3.left = n4
n3.right = n5
print(s.levelOrder(n1))
print(s.levelOrder(None))
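    # Expected output of the two calls above (levels listed left to right):
    # [[1], [2, 3], [4, 5]]
    # []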
| deput/leetcode | binary_tree_level_order_traversal.py | Python | mit | 1,634 |
import os
# We'll render HTML templates and access data sent by POST
# using the request object from flask. Redirect and url_for
# will be used to redirect the user once the upload is done
# and send_from_directory will help us to send/show on the
# browser the file that the user just uploaded
from flask import Flask, render_template, request, flash, redirect, url_for, send_from_directory
from app import app
from werkzeug.utils import secure_filename
# For a given file, return whether it's an allowed type or not
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config['ALLOWED_EXTENSIONS']
@app.route('/', methods=['GET','POST'])
@app.route('/index', methods=['GET','POST'])
def index():
if request.method == 'POST':
# check if the post request has the file part
filename = []
for upfile in ['filewohr','filewhr']:
if upfile not in request.files:
flash('No file part')
return redirect(request.url)
# Get the name of the uploaded file
file = request.files[upfile]
# if user does not select file, browser also
        # submits an empty part without a filename
if file.filename == '':
flash('No selected file')
return redirect(request.url)
# Check if the file is one of the allowed types/extensions
if file and allowed_file(file.filename):
# Make the filename safe, remove unsupported chars
filename.append(secure_filename(file.filename))
# Move the file form the temporary folder to the upload folder
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename[-1]))
else:
flash('Not allowed file')
return redirect(request.url)
# Render the file template
return render_template('file.html',
folder = app.config['UPLOAD_FOLDER'],
filenamewohr = filename[0],
filenamewhr = filename[1],
scroll = 'results')
return render_template('index.html')
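
# Note (illustrative): this module assumes the Flask app config defines, for example:
#   app.config['UPLOAD_FOLDER'] = '/tmp/uploads'
#   app.config['ALLOWED_EXTENSIONS'] = set(['txt', 'csv', 'png'])
# and that templates/index.html posts two file fields named 'filewohr' and 'filewhr'.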
| davidbradway/fusefit | webui/app/views.py | Python | mit | 2,160 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'tanchao'
import sys
SIZE = 256 # defined in challenge description
MATRIX = [[0] * SIZE for i in range(SIZE)]
def set_col(orders_):
col_ = orders_[1] - 1
val_ = orders_[2]
for i in range(SIZE):
MATRIX[i][col_] = val_
def set_row(orders_):
row_ = orders_[1] - 1
val_ = orders_[2]
for j in range(SIZE):
MATRIX[row_][j] = val_
def query_col(orders_):
col_ = orders_[1] - 1
sum_ = 0
for i in range(SIZE):
sum_ += MATRIX[i][col_]
print sum_
def query_row(orders_):
row_ = orders_[1] - 1
sum_ = 0
for j in range(SIZE):
sum_ += MATRIX[row_][j]
print sum_
def query_board(order_):
orders_ = order_.split(' ') # no validation on input
orders_[1] = int(orders_[1])
if len(orders_) == 3:
orders_[2] = int(orders_[2])
if orders_[0] == 'SetCol':
set_col(orders_)
if orders_[0] == 'SetRow':
set_row(orders_)
if orders_[0] == 'QueryCol':
query_col(orders_)
if orders_[0] == 'QueryRow':
query_row(orders_)
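
# Example (illustrative): an input line "SetRow 10 5" fills row 10 with fives;
# a later "QueryRow 10" then prints 1280 (256 * 5).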
with open(sys.argv[1], 'r') as test_cases:
for test in test_cases:
test = test.strip()
if test: # remove ending code '\n' and not empty string
query_board(test) | tanchao/algo | archive/codeeval/py/query_board.py | Python | mit | 1,315 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Everytrace(CMakePackage):
"""Get stack trace EVERY time a program exits."""
homepage = "https://github.com/citibeth/everytrace"
url = "https://github.com/citibeth/everytrace/archive/0.2.2.tar.gz"
git = "https://github.com/citibeth/everytrace.git"
maintainers = ['citibeth']
version('develop', branch='develop')
version('0.2.2', sha256='0487276bb24e648388862d8e1d8cfe56b529f7e3d840df3fcb5b3a3dad4016e1')
variant('mpi', default=True, description='Enables MPI parallelism')
variant('fortran', default=True,
description='Enable use with Fortran programs')
variant('cxx', default=True, description='Enable C++ Exception-based features')
depends_on('mpi', when='+mpi')
def cmake_args(self):
spec = self.spec
return [
'-DUSE_MPI=%s' % ('YES' if '+mpi' in spec else 'NO'),
'-DUSE_FORTRAN=%s' % ('YES' if '+fortran' in spec else 'NO'),
'-DUSE_CXX=%s' % ('YES' if '+cxx' in spec else 'NO')]
| iulian787/spack | var/spack/repos/builtin/packages/everytrace/package.py | Python | lgpl-2.1 | 1,235 |
#
# robin.py
#
# Apply customizations for a MKS Robin
def prepare(address, ldname, fwname):
import pioutil
if pioutil.is_pio_build():
import marlin
def encrypt(source, target, env):
marlin.encrypt_mks(source, target, env, fwname)
marlin.relocate_firmware(address)
marlin.custom_ld_script(ldname)
marlin.add_post_action(encrypt);
| LVD-AC/Marlin | buildroot/share/PlatformIO/scripts/robin.py | Python | gpl-3.0 | 345 |
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Fortran/module-subdir.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Validate that $FORTRANMODDIR values get expanded correctly on Fortran
command lines relative to the appropriate subdirectory.
"""
import os
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.subdir('subdir',
['subdir', 'src'],
['subdir', 'build'])
test.write('myfortran.py', r"""
import getopt
import os
import sys
comment = '#' + sys.argv[1]
length = len(comment)
opts, args = getopt.getopt(sys.argv[2:], 'cM:o:')
for opt, arg in opts:
if opt == '-o': out = arg
elif opt == '-M': modsubdir = arg
import os
infile = open(args[0], 'rb')
outfile = open(out, 'wb')
for l in infile.readlines():
if l[:7] == 'module ':
module = modsubdir + os.sep + l[7:-1] + '.mod'
open(module, 'wb').write('myfortran.py wrote %s\n' % module)
if l[:length] != comment:
outfile.write(l)
sys.exit(0)
""")
test.write('myar.py', """\
import sys
t = open(sys.argv[1], 'wb')
for s in sys.argv[2:]:
t.write(open(s, 'rb').read())
t.close()
sys.exit(0)
""")
test.write('SConstruct', """\
env = Environment(FORTRANMODDIRPREFIX = '-M',
FORTRANMODDIR = 'modules',
FORTRAN = r'%(_python_)s myfortran.py fortran',
AR = 'myar.py',
ARCOM = r'%(_python_)s $AR $TARGET $SOURCES',
RANLIBCOM = '')
Export('env')
objs = SConscript('subdir/SConscript')
env.Library('bidule', objs)
""" % locals())
test.write(['subdir', 'SConscript'], """\
Import('env')
env['FORTRANMODDIR'] = 'build'
sources = ['src/modfile.f']
objs = env.Object(sources)
Return("objs")
""")
test.write(['subdir', 'src', 'modfile.f'], """\
#fortran comment
module somemodule
integer :: nothing
end module
""")
test.run(arguments = '.')
somemodule = os.path.join('subdir', 'build', 'somemodule.mod')
expect = "myfortran.py wrote %s\n" % somemodule
test.must_match(['subdir', 'build', 'somemodule.mod'], expect)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| EmanueleCannizzaro/scons | test/Fortran/module-subdir.py | Python | mit | 3,309 |
"""
This file is part of ocean.
SEA is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SEA is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SEA. If not, see <http://www.gnu.org/licenses/>.
Copyright 2014 by neuromancer
"""
from math import ceil
from Types import Type
from ptrace.error import PtraceError
def FindModule(value, mm):
return mm.findModule(value)
def RefinePType(ptype, value, process, mm):
if value is None:
return (Type("Top32",4), value)
if str(ptype) == "Ptr32":
ptr = value
if ptr == 0x0:
return (Type("NPtr32",4), ptr)
else:
try:
_ = process.readBytes(ptr, 1)
except PtraceError:
#print "Dptr", hex(ptr)
return (Type("DPtr32",4), ptr)
mm.checkPtr(ptr)
if mm.isStackPtr(ptr):
return (Type("SPtr32",4), ptr)
elif mm.isHeapPtr(ptr):
return (Type("HPtr32",4), ptr)
elif mm.isCodePtr(ptr):
return (Type("GxPtr32",4), ptr)
elif mm.isFilePtr(ptr):
return (Type("FPtr32",4), ptr)
elif mm.isGlobalPtr(ptr):
return (Type("GPtr32",4), ptr)
else:
return (Type("Ptr32",4), ptr)
elif str(ptype) == "Num32":
num = value
if num == 0x0:
return (Type("Num32B0",4), num)
else:
binlen = len(bin(num))-2
binlen = int(ceil(binlen / 8.0))*8
return (Type("Num32B"+str(binlen),4), num)
return (Type("Top32",4), value)
| neuromancer/ocean | src/Analysis.py | Python | gpl-3.0 | 1,848 |
"""Brings up a DNS server with container (skydns + skydock) that allow
different dockers to see each other by hostnames.
"""
import common
import docker
def up(uid):
create_service = '{0}/createService.js'.format(common.get_script_dir())
skydns = docker.run(
image='crosbymichael/skydns',
detach=True,
name=common.format_dockername('skydns', uid),
command=['-nameserver', '8.8.8.8:53', '-domain', 'docker'])
skydock = docker.run(
image='crosbymichael/skydock',
detach=True,
name=common.format_dockername('skydock', uid),
reflect=[('/var/run/docker.sock', 'rw')],
volumes=[(create_service, '/createService.js', 'ro')],
command=['-ttl', '30', '-environment', 'dev', '-s',
'/var/run/docker.sock',
'-domain', 'docker', '-name', 'skydns_{0}'.format(uid),
'-plugins',
'/createService.js'])
skydns_config = docker.inspect(skydns)
dns = skydns_config['NetworkSettings']['IPAddress']
return {'dns': dns, 'docker_ids': [skydns, skydock]}
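
# Illustrative usage (assumes a running docker daemon and the helper modules above):
#   config = up('dev1')
#   config['dns']         # IP address of the skydns container, usable as a DNS server
#   config['docker_ids']  # handles of the skydns and skydock containers from docker.run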
| xorver/oneprovider_ccm | bamboos/docker/environment/dns.py | Python | mit | 1,107 |
../../../../share/pyshared/oauth/__init__.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/oauth/__init__.py | Python | gpl-3.0 | 44 |
# -*- coding: UTF-8 -*-
__kupfer_name__ = _("Shorten Links")
__kupfer_actions__ = ("ShortenLinks", )
__description__ = _("Create short aliases of long URLs")
__version__ = "2011-03-01"
__author__ = "Karol Będkowski <[email protected]>"
import httplib
import urllib
from kupfer.objects import Leaf, Action, Source, UrlLeaf, OperationError
from kupfer.plugin import ssl_support
from kupfer import pretty
class _ShortLinksService(Leaf):
def __init__(self, name):
Leaf.__init__(self, name, name)
def get_icon_name(self):
return "text-html"
class _GETService(_ShortLinksService, pretty.OutputMixin):
""" A unified shortener service working with GET requests """
host = None
path = None
url_key = "url"
use_https = False
def process(self, url):
"""Shorten @url or raise ValueError"""
query_string = urllib.urlencode({self.url_key : url})
try:
if self.use_https and ssl_support.is_supported():
conn = ssl_support.VerifiedHTTPSConnection(self.host, timeout=5)
pretty.print_debug(__name__, "Connected SSL to", self.host)
else:
conn = httplib.HTTPConnection(self.host, timeout=5)
conn.request("GET", self.path+query_string)
resp = conn.getresponse()
if resp.status != 200:
raise ValueError('Invalid response %d, %s' % (resp.status,
resp.reason))
result = resp.read()
return result.strip()
except (httplib.HTTPException, IOError, ValueError) as exc:
raise ValueError(exc)
return _('Error')
# NOTE: It's important that we use only sites that provide a stable API
class TinyUrl(_GETService):
"""
Website: http://tinyurl.com
"""
host = "tinyurl.com"
path = "/api-create.php?"
def __init__(self):
_ShortLinksService.__init__(self, u'TinyUrl.com')
class IsGd(_GETService):
"""
Website: http://is.gd
Reference: http://is.gd/apishorteningreference.php
"""
host = 'is.gd'
path = '/create.php?format=simple&'
def __init__(self):
_ShortLinksService.__init__(self, u'Is.gd')
class VGd(_GETService):
"""
Website: http://v.gd
Reference: http://v.gd/apishorteningreference.php
Like is.gd, but v.gd always shows a preview page.
"""
host = 'v.gd'
path = '/create.php?format=simple&'
def __init__(self):
_ShortLinksService.__init__(self, u'V.gd')
class BitLy(_GETService):
"""
Website: http://bit.ly
Reference: http://code.google.com/p/bitly-api/wiki/ApiDocumentation
"""
# No password is available for this login name,
# yet there is a possibility that you could track
# all URLs shortened using this API key
BITLY_LOGIN = "kupferkupfer"
BITLY_API_KEY = "R_a617770f00b647d6c22ce162105125c2"
host = 'api.bitly.com'
path = ('/v3/shorten?login=%s&apiKey=%s&format=txt&' %
(BITLY_LOGIN, BITLY_API_KEY))
url_key = "longUrl"
def __init__(self):
_ShortLinksService.__init__(self, u'Bit.ly')
class BitLySSL(BitLy):
host = 'api-ssl.bitly.com'
use_https = True
def __init__(self):
_ShortLinksService.__init__(self, u'Bit.ly (HTTPS)')
def process(self, url):
resp = BitLy.process(self, url)
return resp.replace("http://bit.ly", "https://bit.ly")
class ShortenLinks(Action):
''' Shorten links with selected engine '''
def __init__(self):
Action.__init__(self, _('Shorten With...'))
def has_result(self):
return True
def activate(self, leaf, iobj):
try:
result = iobj.process(leaf.object)
except ValueError as exc:
raise OperationError(unicode(exc))
return UrlLeaf(result, result)
def item_types(self):
yield UrlLeaf
def requires_object(self):
return True
def object_types(self):
yield _ShortLinksService
def object_source(self, for_item=None):
return ServicesSource()
def get_description(self):
return __description__
class ServicesSource(Source):
def __init__(self):
Source.__init__(self, _("Services"))
def get_items(self):
yield TinyUrl()
yield IsGd()
yield VGd()
yield BitLy()
if ssl_support.is_supported():
yield BitLySSL()
def should_sort_lexically(self):
return True
def get_icon_name(self):
return "applications-internet"
| Theragon/kupfer | kupfer/plugin/shorten_links.py | Python | gpl-3.0 | 4,025 |
#!/usr/bin/env python
# Copyright 2021 Google LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Analytics Admin API sample application which prints details for the
Google Analytics 4 property user link using a batch call.
See https://developers.google.com/analytics/devguides/config/admin/v1/rest/v1alpha/properties.userLinks/batchGet
for more information.
"""
# [START analyticsadmin_properties_user_links_batch_get]
from google.analytics.admin import AnalyticsAdminServiceClient
from google.analytics.admin_v1alpha.types import BatchGetUserLinksRequest
def run_sample():
"""Runs the sample."""
# TODO(developer): Replace this variable with your Google Analytics 4
# property ID (e.g. "123456") before running the sample.
property_id = "YOUR-GA4-PROPERTY-ID"
# TODO(developer): Replace this variable with your Google Analytics
# account user link ID (e.g. "123456") before running the sample.
property_user_link_id = "YOUR-ACCOUNT-USER-LINK-ID"
batch_get_property_user_link(property_id, property_user_link_id)
def batch_get_property_user_link(property_id, property_user_link_id):
"""Retrieves details for the Google Analytics 4 property user link using a
batch call."""
client = AnalyticsAdminServiceClient()
response = client.batch_get_user_links(
BatchGetUserLinksRequest(
parent=f"properties/{property_id}",
names=[f"properties/{property_id}/userLinks/{property_user_link_id}"],
)
)
print("Result:")
for user_link in response.user_links:
print(user_link)
print()
# [END analyticsadmin_properties_user_links_batch_get]
if __name__ == "__main__":
run_sample()
| googleapis/python-analytics-admin | samples/properties_user_links_batch_get.py | Python | apache-2.0 | 2,230 |
#!/usr/bin/env python
#
# GrovePi Example for using the Grove Temperature & Humidity Sensor Pro
# (http://www.seeedstudio.com/wiki/Grove_-_Temperature_and_Humidity_Sensor_Pro)
#
# The GrovePi connects the Raspberry Pi and Grove sensors.
# You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import grovepi
# Connect the Grove Temperature & Humidity Sensor Pro to digital port D4
# This example uses the blue colored sensor.
# SIG,NC,VCC,GND
sensor = 4 # The Sensor goes on digital port 4.
# temp_humidity_sensor_type
# Grove Base Kit comes with the blue sensor.
blue = 0 # The Blue colored sensor.
white = 1 # The White colored sensor.
while True:
try:
# This example uses the blue colored sensor.
# The first parameter is the port, the second parameter is the type of sensor.
[temp,humidity] = grovepi.dht(sensor,blue)
print("temp = %.02f C humidity =%.02f%%"%(temp, humidity))
except IOError:
print ("Error")
| penoud/GrovePi | Software/Python/grove_dht_pro.py | Python | mit | 2,333 |
#
# Copyright (C) 2010 Cardapio Team ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class CardapioPlugin (CardapioPluginInterface):
author = 'Cardapio team'
name = _('Command Launcher')
description = _('Run commands from the search box')
url = ''
help_text = ''
version = '1.13'
plugin_api_version = 1.40
search_delay_type = None
default_keyword = 'run'
category_name = _('Run Command')
category_icon = 'system-run'
icon = 'system-run'
category_tooltip = _('Run system commands, just like in the command-line')
fallback_icon = 'system-run'
hide_from_sidebar = True
def __init__(self, cardapio_proxy, category):
'''
This method is called when the plugin is enabled.
Nothing much to be done here except initialize variables and set loaded to True
'''
self.c = cardapio_proxy
try:
import os
from glob import iglob
except Exception, exception:
self.c.write_to_log(self, 'Could not import certain modules', is_error = True)
self.c.write_to_log(self, exception, is_error = True)
self.loaded = False
return
self.os = os
self.iglob = iglob
self.pathlist = os.environ['PATH'].split(':')
self.in_a_terminal = _('Execute \'%s\' In Terminal')
self.in_a_terminal_tooltip = _('Execute the command \'%s\' inside a new terminal window')
self.as_root = _('Execute \'%s\' As Root')
self.as_root_tooltip = _('Execute the command \'%s\' with administrative rights')
self.loaded = True # set to true if everything goes well
def search(self, text, result_limit):
self.current_query = text
results = []
text_list = text.split(None, 1)
cmdname = text_list[0]
if len(text_list) == 2:
args = ' ' + text_list[1]
else:
args = ''
num_results = 0
for path in self.pathlist:
if num_results >= result_limit: break
cmd_iter = self.iglob('%s/%s*' % (path, cmdname))
while num_results < result_limit:
try: cmd = self.os.path.basename(cmd_iter.next())
except StopIteration: break
cmdargs = cmd + args
item = {
'name' : cmdargs,
'tooltip' : 'Run \'%s\'' % cmdargs,
'icon name' : cmd,
'type' : 'raw-no-notification',
'command' : cmdargs,
'context menu' : [
{
'name' : self.in_a_terminal % cmdargs,
'tooltip' : self.in_a_terminal_tooltip % cmdargs,
'icon name' : 'utilities-terminal',
'type' : 'raw-in-terminal',
#'command' : 'gnome-terminal -x bash -c \"%s ; bash\"' % cmdargs
'command' : cmdargs
},
{
'name' : self.as_root % cmdargs,
'tooltip' : self.as_root_tooltip % cmdargs,
'icon name' : cmd,
'type' : 'raw',
'command' : 'gksudo \"%s\"' % cmdargs
}]
}
results.append(item)
num_results += 1
results.sort(key = lambda r: r['name'])
# Thiago> if the command was not found, don't display anything
#
# if not results:
# results.append({
# 'name' : text,
# 'tooltip' : 'Run \'%s\'' % text,
# 'icon name' : 'system-run',
# 'type' : 'raw',
# 'command' : text,
# 'context menu' : [
# {
# 'name' : self.in_a_terminal % text,
# 'tooltip' : self.in_a_terminal_tooltip % text,
# 'icon name' : 'utilities-terminal',
# 'type' : 'raw',
# 'command' : 'gnome-terminal -x bash -c \"%s ; bash\"' % text
# },
# {
# 'name' : self.as_root % text,
# 'tooltip' : self.as_root_tooltip % text,
# 'icon name' : 'system-run',
# 'type' : 'raw',
# 'command' : 'gksudo \"%s\"' % text
# }]
# })
self.c.handle_search_result(self, results, self.current_query)
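# Illustrative note on what search() yields (assumption: /usr/bin is on PATH and
# contains 'firefox'): a query such as 'fire -new-tab' splits into the command
# prefix 'fire' and the arguments ' -new-tab', the glob '/usr/bin/fire*' matches
# 'firefox', and the resulting item carries 'command': 'firefox -new-tab' along
# with the in-terminal and gksudo variants in its 'context menu'.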
| genesi/cardapio | src/plugins/command_launcher.py | Python | gpl-3.0 | 4,494 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_address_object
short_description: Create address objects on PAN-OS devices.
description:
- Create address objects on PAN-OS devices.
author:
- Michael Richardson (@mrichardson03)
- Garfield Lee Freeman (@shinmog)
version_added: "2.8"
requirements:
- pan-python can be obtained from PyPI U(https://pypi.python.org/pypi/pan-python)
- pandevice can be obtained from PyPI U(https://pypi.python.org/pypi/pandevice)
notes:
- Panorama is supported.
- Check mode is supported.
extends_documentation_fragment:
- panos.transitional_provider
- panos.vsys
- panos.device_group
- panos.state
options:
name:
description:
- Name of object to create.
required: true
value:
description:
- IP address, IP range, or FQDN for the object. Must specify if state is I(present).
required: true
address_type:
description:
- Type of address object.
choices: ['ip-netmask', 'ip-range', 'fqdn']
default: 'ip-netmask'
description:
description:
- Descriptive name for this address object.
tag:
description:
- List of tags to add to this address object.
type: list
commit:
description:
- Commit changes after creating object. If I(ip_address) is a Panorama device, and I(device_group) is
also set, perform a commit to Panorama and a commit-all to the device group.
required: false
type: bool
default: true
'''
EXAMPLES = '''
- name: Create object 'Test-One'
panos_address_object:
provider: '{{ provider }}'
name: 'Test-One'
value: '1.1.1.1'
description: 'Description One'
tag: ['Prod']
- name: Create object 'Test-Two'
panos_address_object:
provider: '{{ provider }}'
name: 'Test-Two'
address_type: 'ip-range'
value: '1.1.1.1-2.2.2.2'
description: 'Description Two'
tag: ['SI']
- name: Create object 'Test-Three'
panos_address_object:
provider: '{{ provider }}'
name: 'Test-Three'
address_type: 'fqdn'
value: 'foo.bar.baz'
description: 'Description Three'
- name: Delete object 'Test-Two'
panos_address_object:
provider: '{{ provider }}'
name: 'Test-Two'
state: 'absent'
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.panos.panos import get_connection
try:
from pandevice.objects import AddressObject
from pandevice.errors import PanDeviceError
except ImportError:
pass
def main():
helper = get_connection(
vsys=True,
device_group=True,
with_classic_provider_spec=True,
with_state=True,
argument_spec=dict(
name=dict(required=True),
value=dict(),
address_type=dict(default='ip-netmask', choices=['ip-netmask', 'ip-range', 'fqdn']),
description=dict(),
tag=dict(type='list'),
commit=dict(type='bool', default=True),
),
)
module = AnsibleModule(
argument_spec=helper.argument_spec,
required_one_of=helper.required_one_of,
supports_check_mode=True,
)
# Verify libs are present, get parent object.
parent = helper.get_pandevice_parent(module)
# Object params.
spec = {
'name': module.params['name'],
'value': module.params['value'],
'type': module.params['address_type'],
'description': module.params['description'],
'tag': module.params['tag'],
}
# Other info.
commit = module.params['commit']
# Retrieve current info.
try:
listing = AddressObject.refreshall(parent, add=False)
except PanDeviceError as e:
module.fail_json(msg='Failed refresh: {0}'.format(e))
# Build the object based on the user spec.
obj = AddressObject(**spec)
parent.add(obj)
# Apply the state.
changed = helper.apply_state(obj, listing, module)
# Commit.
if commit and changed:
helper.commit(module)
# Done.
module.exit_json(changed=changed)
if __name__ == '__main__':
main()
| PaloAltoNetworks-BD/ansible-pan | library/panos_address_object.py | Python | isc | 5,097 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Built-in mask mixin class.
The design uses `Masked` as a factory class which automatically
generates new subclasses for any data class that is itself a
subclass of a predefined masked class, with `MaskedNDArray`
providing such a predefined class for `~numpy.ndarray`.
Generally, any new predefined class should override the
``from_unmasked(data, mask, copy=False)`` class method that
creates an instance from unmasked data and a mask, as well as
the ``unmasked`` property that returns just the data.
The `Masked` class itself provides a base ``mask`` property,
which can also be overridden if needed.
"""
import builtins
import numpy as np
from astropy.utils.shapes import NDArrayShapeMethods
from astropy.utils.data_info import ParentDtypeInfo
from .function_helpers import (MASKED_SAFE_FUNCTIONS,
APPLY_TO_BOTH_FUNCTIONS,
DISPATCHED_FUNCTIONS,
UNSUPPORTED_FUNCTIONS)
__all__ = ['Masked', 'MaskedNDArray']
get__doc__ = """Masked version of {0.__name__}.
Except for the ability to pass in a ``mask``, parameters are
as for `{0.__module__}.{0.__name__}`.
""".format
class Masked(NDArrayShapeMethods):
"""A scalar value or array of values with associated mask.
The resulting instance will take its exact type from whatever the
contents are, with the type generated on the fly as needed.
Parameters
----------
data : array-like
        The data for which a mask is to be added.  The result will be
        a subclass of the type of ``data``.
mask : array-like of bool, optional
The initial mask to assign. If not given, taken from the data.
copy : bool
Whether the data and mask should be copied. Default: `False`.
"""
_base_classes = {}
"""Explicitly defined masked classes keyed by their unmasked counterparts.
For subclasses of these unmasked classes, masked counterparts can be generated.
"""
_masked_classes = {}
"""Masked classes keyed by their unmasked data counterparts."""
def __new__(cls, *args, **kwargs):
if cls is Masked:
# Initializing with Masked itself means we're in "factory mode".
if not kwargs and len(args) == 1 and isinstance(args[0], type):
# Create a new masked class.
return cls._get_masked_cls(args[0])
else:
return cls._get_masked_instance(*args, **kwargs)
else:
# Otherwise we're a subclass and should just pass information on.
return super().__new__(cls, *args, **kwargs)
def __init_subclass__(cls, base_cls=None, data_cls=None, **kwargs):
"""Register a Masked subclass.
Parameters
----------
base_cls : type, optional
If given, it is taken to mean that ``cls`` can be used as
a base for masked versions of all subclasses of ``base_cls``,
so it is registered as such in ``_base_classes``.
data_cls : type, optional
            If given, ``cls`` will be registered as the masked version of
``data_cls``. Will set the private ``cls._data_cls`` attribute,
and auto-generate a docstring if not present already.
**kwargs
Passed on for possible further initialization by superclasses.
"""
if base_cls is not None:
Masked._base_classes[base_cls] = cls
if data_cls is not None:
cls._data_cls = data_cls
cls._masked_classes[data_cls] = cls
if cls.__doc__ is None:
cls.__doc__ = get__doc__(data_cls)
super().__init_subclass__(**kwargs)
# This base implementation just uses the class initializer.
# Subclasses can override this in case the class does not work
# with this signature, or to provide a faster implementation.
@classmethod
def from_unmasked(cls, data, mask=None, copy=False):
"""Create an instance from unmasked data and a mask."""
return cls(data, mask=mask, copy=copy)
@classmethod
def _get_masked_instance(cls, data, mask=None, copy=False):
data, data_mask = cls._get_data_and_mask(data)
if mask is None:
mask = False if data_mask is None else data_mask
masked_cls = cls._get_masked_cls(data.__class__)
return masked_cls.from_unmasked(data, mask, copy)
@classmethod
def _get_masked_cls(cls, data_cls):
"""Get the masked wrapper for a given data class.
If the data class does not exist yet but is a subclass of any of the
registered base data classes, it is automatically generated
(except we skip `~numpy.ma.MaskedArray` subclasses, since then the
masking mechanisms would interfere).
"""
if issubclass(data_cls, (Masked, np.ma.MaskedArray)):
return data_cls
masked_cls = cls._masked_classes.get(data_cls)
if masked_cls is None:
# Walk through MRO and find closest base data class.
# Note: right now, will basically always be ndarray, but
# one could imagine needing some special care for one subclass,
# which would then get its own entry. E.g., if MaskedAngle
# defined something special, then MaskedLongitude should depend
# on it.
for mro_item in data_cls.__mro__:
base_cls = cls._base_classes.get(mro_item)
if base_cls is not None:
break
else:
# Just hope that MaskedNDArray can handle it.
# TODO: this covers the case where a user puts in a list or so,
# but for those one could just explicitly do something like
# _masked_classes[list] = MaskedNDArray.
return MaskedNDArray
# Create (and therefore register) new Masked subclass for the
# given data_cls.
masked_cls = type('Masked' + data_cls.__name__,
(data_cls, base_cls), {}, data_cls=data_cls)
return masked_cls
@classmethod
def _get_data_and_mask(cls, data, allow_ma_masked=False):
"""Split data into unmasked and mask, if present.
Parameters
----------
data : array-like
Possibly masked item, judged by whether it has a ``mask`` attribute.
If so, checks for being an instance of `~astropy.utils.masked.Masked`
or `~numpy.ma.MaskedArray`, and gets unmasked data appropriately.
allow_ma_masked : bool, optional
Whether or not to process `~numpy.ma.masked`, i.e., an item that
implies no data but the presence of a mask.
Returns
-------
unmasked, mask : array-like
Unmasked will be `None` for `~numpy.ma.masked`.
Raises
------
ValueError
If `~numpy.ma.masked` is passed in and ``allow_ma_masked`` is not set.
"""
mask = getattr(data, 'mask', None)
if mask is not None:
try:
data = data.unmasked
except AttributeError:
if not isinstance(data, np.ma.MaskedArray):
raise
if data is np.ma.masked:
if allow_ma_masked:
data = None
else:
raise ValueError('cannot handle np.ma.masked here.') from None
else:
data = data.data
return data, mask
@classmethod
def _get_data_and_masks(cls, *args):
data_masks = [cls._get_data_and_mask(arg) for arg in args]
return (tuple(data for data, _ in data_masks),
tuple(mask for _, mask in data_masks))
def _get_mask(self):
"""The mask.
If set, replace the original mask, with whatever it is set with,
using a view if no broadcasting or type conversion is required.
"""
return self._mask
def _set_mask(self, mask, copy=False):
self_dtype = getattr(self, 'dtype', None)
mask_dtype = (np.ma.make_mask_descr(self_dtype)
if self_dtype and self_dtype.names else np.dtype('?'))
ma = np.asanyarray(mask, dtype=mask_dtype)
if ma.shape != self.shape:
# This will fail (correctly) if not broadcastable.
self._mask = np.empty(self.shape, dtype=mask_dtype)
self._mask[...] = ma
elif ma is mask:
# Even if not copying use a view so that shape setting
# does not propagate.
self._mask = mask.copy() if copy else mask.view()
else:
self._mask = ma
mask = property(_get_mask, _set_mask)
# Note: subclass should generally override the unmasked property.
# This one assumes the unmasked data is stored in a private attribute.
@property
def unmasked(self):
"""The unmasked values.
See Also
--------
astropy.utils.masked.Masked.filled
"""
return self._unmasked
def filled(self, fill_value):
"""Get a copy of the underlying data, with masked values filled in.
Parameters
----------
fill_value : object
Value to replace masked values with.
See Also
--------
astropy.utils.masked.Masked.unmasked
"""
unmasked = self.unmasked.copy()
if self.mask.dtype.names:
np.ma.core._recursive_filled(unmasked, self.mask, fill_value)
else:
unmasked[self.mask] = fill_value
return unmasked
def _apply(self, method, *args, **kwargs):
# Required method for NDArrayShapeMethods, to help provide __getitem__
# and shape-changing methods.
if callable(method):
data = method(self.unmasked, *args, **kwargs)
mask = method(self.mask, *args, **kwargs)
else:
data = getattr(self.unmasked, method)(*args, **kwargs)
mask = getattr(self.mask, method)(*args, **kwargs)
result = self.from_unmasked(data, mask, copy=False)
if 'info' in self.__dict__:
result.info = self.info
return result
def __setitem__(self, item, value):
value, mask = self._get_data_and_mask(value, allow_ma_masked=True)
if value is not None:
self.unmasked[item] = value
self.mask[item] = mask
class MaskedInfoBase:
mask_val = np.ma.masked
def __init__(self, bound=False):
super().__init__(bound)
# If bound to a data object instance then create the dict of attributes
# which stores the info attribute values.
if bound:
# Specify how to serialize this object depending on context.
self.serialize_method = {'fits': 'null_value',
'ecsv': 'null_value',
'hdf5': 'data_mask',
'parquet': 'data_mask',
None: 'null_value'}
class MaskedNDArrayInfo(MaskedInfoBase, ParentDtypeInfo):
"""
Container for meta information like name, description, format.
"""
# Add `serialize_method` attribute to the attrs that MaskedNDArrayInfo knows
# about. This allows customization of the way that MaskedColumn objects
# get written to file depending on format. The default is to use whatever
# the writer would normally do, which in the case of FITS or ECSV is to use
# a NULL value within the data itself. If serialize_method is 'data_mask'
# then the mask is explicitly written out as a separate column if there
# are any masked values. This is the same as for MaskedColumn.
attr_names = ParentDtypeInfo.attr_names | {'serialize_method'}
# When `serialize_method` is 'data_mask', and data and mask are being written
# as separate columns, use column names <name> and <name>.mask (instead
# of default encoding as <name>.data and <name>.mask).
_represent_as_dict_primary_data = 'data'
def _represent_as_dict(self):
out = super()._represent_as_dict()
masked_array = self._parent
# If the serialize method for this context (e.g. 'fits' or 'ecsv') is
# 'data_mask', that means to serialize using an explicit mask column.
method = self.serialize_method[self._serialize_context]
if method == 'data_mask':
out['data'] = masked_array.unmasked
if np.any(masked_array.mask):
# Only if there are actually masked elements do we add the ``mask`` column
out['mask'] = masked_array.mask
elif method == 'null_value':
out['data'] = np.ma.MaskedArray(masked_array.unmasked,
mask=masked_array.mask)
else:
raise ValueError('serialize method must be either "data_mask" or "null_value"')
return out
def _construct_from_dict(self, map):
# Override usual handling, since MaskedNDArray takes shape and buffer
# as input, which is less useful here.
# The map can contain either a MaskedColumn or a Column and a mask.
# Extract the mask for the former case.
map.setdefault('mask', getattr(map['data'], 'mask', False))
return self._parent_cls.from_unmasked(**map)
class MaskedArraySubclassInfo(MaskedInfoBase):
"""Mixin class to create a subclasses such as MaskedQuantityInfo."""
# This is used below in __init_subclass__, which also inserts a
# 'serialize_method' attribute in attr_names.
def _represent_as_dict(self):
# Use the data_cls as the class name for serialization,
# so that we do not have to store all possible masked classes
# in astropy.table.serialize.__construct_mixin_classes.
out = super()._represent_as_dict()
data_cls = self._parent._data_cls
out.setdefault('__class__',
data_cls.__module__ + '.' + data_cls.__name__)
return out
def _comparison_method(op):
"""
Create a comparison operator for MaskedNDArray.
Needed since for string dtypes the base operators bypass __array_ufunc__
and hence return unmasked results.
"""
def _compare(self, other):
other_data, other_mask = self._get_data_and_mask(other)
result = getattr(self.unmasked, op)(other_data)
if result is NotImplemented:
return NotImplemented
mask = self.mask | (other_mask if other_mask is not None else False)
return self._masked_result(result, mask, None)
return _compare
class MaskedIterator:
"""
Flat iterator object to iterate over Masked Arrays.
A `~astropy.utils.masked.MaskedIterator` iterator is returned by ``m.flat``
for any masked array ``m``. It allows iterating over the array as if it
were a 1-D array, either in a for-loop or by calling its `next` method.
Iteration is done in C-contiguous style, with the last index varying the
fastest. The iterator can also be indexed using basic slicing or
advanced indexing.
Notes
-----
The design of `~astropy.utils.masked.MaskedIterator` follows that of
`~numpy.ma.core.MaskedIterator`. It is not exported by the
`~astropy.utils.masked` module. Instead of instantiating directly,
use the ``flat`` method in the masked array instance.
"""
def __init__(self, m):
self._masked = m
self._dataiter = m.unmasked.flat
self._maskiter = m.mask.flat
def __iter__(self):
return self
def __getitem__(self, indx):
out = self._dataiter.__getitem__(indx)
mask = self._maskiter.__getitem__(indx)
# For single elements, ndarray.flat.__getitem__ returns scalars; these
# need a new view as a Masked array.
if not isinstance(out, np.ndarray):
out = out[...]
mask = mask[...]
return self._masked.from_unmasked(out, mask, copy=False)
def __setitem__(self, index, value):
data, mask = self._masked._get_data_and_mask(value, allow_ma_masked=True)
if data is not None:
self._dataiter[index] = data
self._maskiter[index] = mask
def __next__(self):
"""
Return the next value, or raise StopIteration.
"""
out = next(self._dataiter)[...]
mask = next(self._maskiter)[...]
return self._masked.from_unmasked(out, mask, copy=False)
next = __next__
class MaskedNDArray(Masked, np.ndarray, base_cls=np.ndarray, data_cls=np.ndarray):
_mask = None
info = MaskedNDArrayInfo()
def __new__(cls, *args, mask=None, **kwargs):
"""Get data class instance from arguments and then set mask."""
self = super().__new__(cls, *args, **kwargs)
if mask is not None:
self.mask = mask
elif self._mask is None:
self.mask = False
return self
def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
# For all subclasses we should set a default __new__ that passes on
# arguments other than mask to the data class, and then sets the mask.
if '__new__' not in cls.__dict__:
def __new__(newcls, *args, mask=None, **kwargs):
"""Get data class instance from arguments and then set mask."""
# Need to explicitly mention classes outside of class definition.
self = super(cls, newcls).__new__(newcls, *args, **kwargs)
if mask is not None:
self.mask = mask
elif self._mask is None:
self.mask = False
return self
cls.__new__ = __new__
if 'info' not in cls.__dict__ and hasattr(cls._data_cls, 'info'):
data_info = cls._data_cls.info
attr_names = data_info.attr_names | {'serialize_method'}
new_info = type(cls.__name__+'Info',
(MaskedArraySubclassInfo, data_info.__class__),
dict(attr_names=attr_names))
cls.info = new_info()
# The two pieces typically overridden.
@classmethod
def from_unmasked(cls, data, mask=None, copy=False):
# Note: have to override since __new__ would use ndarray.__new__
# which expects the shape as its first argument, not an array.
data = np.array(data, subok=True, copy=copy)
self = data.view(cls)
self._set_mask(mask, copy=copy)
return self
@property
def unmasked(self):
return super().view(self._data_cls)
@classmethod
def _get_masked_cls(cls, data_cls):
# Short-cuts
if data_cls is np.ndarray:
return MaskedNDArray
elif data_cls is None: # for .view()
return cls
return super()._get_masked_cls(data_cls)
@property
def flat(self):
"""A 1-D iterator over the Masked array.
This returns a ``MaskedIterator`` instance, which behaves the same
as the `~numpy.flatiter` instance returned by `~numpy.ndarray.flat`,
and is similar to Python's built-in iterator, except that it also
allows assignment.
"""
return MaskedIterator(self)
@property
def _baseclass(self):
"""Work-around for MaskedArray initialization.
Allows the base class to be inferred correctly when a masked instance
is used to initialize (or viewed as) a `~numpy.ma.MaskedArray`.
"""
return self._data_cls
def view(self, dtype=None, type=None):
"""New view of the masked array.
Like `numpy.ndarray.view`, but always returning a masked array subclass.
"""
if type is None and (isinstance(dtype, builtins.type)
and issubclass(dtype, np.ndarray)):
return super().view(self._get_masked_cls(dtype))
if dtype is None:
return super().view(self._get_masked_cls(type))
dtype = np.dtype(dtype)
if not (dtype.itemsize == self.dtype.itemsize
and (dtype.names is None
or len(dtype.names) == len(self.dtype.names))):
raise NotImplementedError(
f"{self.__class__} cannot be viewed with a dtype with a "
f"with a different number of fields or size.")
return super().view(dtype, self._get_masked_cls(type))
def __array_finalize__(self, obj):
# If we're a new object or viewing an ndarray, nothing has to be done.
if obj is None or obj.__class__ is np.ndarray:
return
# Logically, this should come from ndarray and hence be None, but
# just in case someone creates a new mixin, we check.
super_array_finalize = super().__array_finalize__
if super_array_finalize: # pragma: no cover
super_array_finalize(obj)
if self._mask is None:
# Got here after, e.g., a view of another masked class.
# Get its mask, or initialize ours.
self._set_mask(getattr(obj, '_mask', False))
if 'info' in obj.__dict__:
self.info = obj.info
@property
def shape(self):
"""The shape of the data and the mask.
Usually used to get the current shape of an array, but may also be
used to reshape the array in-place by assigning a tuple of array
dimensions to it. As with `numpy.reshape`, one of the new shape
dimensions can be -1, in which case its value is inferred from the
size of the array and the remaining dimensions.
Raises
------
AttributeError
If a copy is required, of either the data or the mask.
"""
# Redefinition to allow defining a setter and add a docstring.
return super().shape
@shape.setter
def shape(self, shape):
old_shape = self.shape
self._mask.shape = shape
# Reshape array proper in try/except just in case some broadcasting
# or so causes it to fail.
try:
super(MaskedNDArray, type(self)).shape.__set__(self, shape)
except Exception as exc:
self._mask.shape = old_shape
# Given that the mask reshaping succeeded, the only logical
# reason for an exception is something like a broadcast error in
            # __array_finalize__, or a different memory ordering between
# mask and data. For those, give a more useful error message;
# otherwise just raise the error.
if 'could not broadcast' in exc.args[0]:
raise AttributeError(
'Incompatible shape for in-place modification. '
'Use `.reshape()` to make a copy with the desired '
'shape.') from None
else: # pragma: no cover
raise
_eq_simple = _comparison_method('__eq__')
_ne_simple = _comparison_method('__ne__')
__lt__ = _comparison_method('__lt__')
__le__ = _comparison_method('__le__')
__gt__ = _comparison_method('__gt__')
__ge__ = _comparison_method('__ge__')
def __eq__(self, other):
if not self.dtype.names:
return self._eq_simple(other)
# For structured arrays, we treat this as a reduction over the fields,
# where masked fields are skipped and thus do not influence the result.
other = np.asanyarray(other, dtype=self.dtype)
result = np.stack([self[field] == other[field]
for field in self.dtype.names], axis=-1)
return result.all(axis=-1)
def __ne__(self, other):
if not self.dtype.names:
return self._ne_simple(other)
# For structured arrays, we treat this as a reduction over the fields,
# where masked fields are skipped and thus do not influence the result.
other = np.asanyarray(other, dtype=self.dtype)
result = np.stack([self[field] != other[field]
for field in self.dtype.names], axis=-1)
return result.any(axis=-1)
def _combine_masks(self, masks, out=None):
masks = [m for m in masks if m is not None and m is not False]
if not masks:
return False
if len(masks) == 1:
if out is None:
return masks[0].copy()
else:
np.copyto(out, masks[0])
return out
out = np.logical_or(masks[0], masks[1], out=out)
for mask in masks[2:]:
np.logical_or(out, mask, out=out)
return out
def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
out = kwargs.pop('out', None)
out_unmasked = None
out_mask = None
if out is not None:
out_unmasked, out_masks = self._get_data_and_masks(*out)
for d, m in zip(out_unmasked, out_masks):
if m is None:
# TODO: allow writing to unmasked output if nothing is masked?
if d is not None:
raise TypeError('cannot write to unmasked output')
elif out_mask is None:
out_mask = m
unmasked, masks = self._get_data_and_masks(*inputs)
if ufunc.signature:
# We're dealing with a gufunc. For now, only deal with
# np.matmul and gufuncs for which the mask of any output always
# depends on all core dimension values of all inputs.
# Also ignore axes keyword for now...
# TODO: in principle, it should be possible to generate the mask
# purely based on the signature.
if 'axes' in kwargs:
raise NotImplementedError("Masked does not yet support gufunc "
"calls with 'axes'.")
if ufunc is np.matmul:
# np.matmul is tricky and its signature cannot be parsed by
# _parse_gufunc_signature.
unmasked = np.atleast_1d(*unmasked)
mask0, mask1 = masks
masks = []
is_mat1 = unmasked[1].ndim >= 2
if mask0 is not None:
masks.append(
np.logical_or.reduce(mask0, axis=-1, keepdims=is_mat1))
if mask1 is not None:
masks.append(
np.logical_or.reduce(mask1, axis=-2, keepdims=True)
if is_mat1 else
np.logical_or.reduce(mask1))
mask = self._combine_masks(masks, out=out_mask)
else:
# Parse signature with private numpy function. Note it
# cannot handle spaces in tuples, so remove those.
in_sig, out_sig = np.lib.function_base._parse_gufunc_signature(
ufunc.signature.replace(' ', ''))
axis = kwargs.get('axis', -1)
keepdims = kwargs.get('keepdims', False)
in_masks = []
for sig, mask in zip(in_sig, masks):
if mask is not None:
if sig:
# Input has core dimensions. Assume that if any
# value in those is masked, the output will be
# masked too (TODO: for multiple core dimensions
# this may be too strong).
mask = np.logical_or.reduce(
mask, axis=axis, keepdims=keepdims)
in_masks.append(mask)
mask = self._combine_masks(in_masks)
result_masks = []
for os in out_sig:
if os:
# Output has core dimensions. Assume all those
# get the same mask.
result_mask = np.expand_dims(mask, axis)
else:
result_mask = mask
result_masks.append(result_mask)
mask = result_masks if len(result_masks) > 1 else result_masks[0]
elif method == '__call__':
# Regular ufunc call.
mask = self._combine_masks(masks, out=out_mask)
elif method == 'outer':
# Must have two arguments; adjust masks as will be done for data.
assert len(masks) == 2
masks = [(m if m is not None else False) for m in masks]
mask = np.logical_or.outer(masks[0], masks[1], out=out_mask)
elif method in {'reduce', 'accumulate'}:
# Reductions like np.add.reduce (sum).
if masks[0] is not None:
# By default, we simply propagate masks, since for
# things like np.sum, it makes no sense to do otherwise.
# Individual methods need to override as needed.
# TODO: take care of 'out' too?
if method == 'reduce':
axis = kwargs.get('axis', None)
keepdims = kwargs.get('keepdims', False)
where = kwargs.get('where', True)
mask = np.logical_or.reduce(masks[0], where=where,
axis=axis, keepdims=keepdims,
out=out_mask)
if where is not True:
# Mask also whole rows that were not selected by where,
# so would have been left as unmasked above.
mask |= np.logical_and.reduce(masks[0], where=where,
axis=axis, keepdims=keepdims)
else:
# Accumulate
axis = kwargs.get('axis', 0)
mask = np.logical_or.accumulate(masks[0], axis=axis,
out=out_mask)
elif out is not None:
mask = False
else: # pragma: no cover
# Can only get here if neither input nor output was masked, but
# perhaps axis or where was masked (in numpy < 1.21 this is
# possible). We don't support this.
return NotImplemented
elif method in {'reduceat', 'at'}: # pragma: no cover
# TODO: implement things like np.add.accumulate (used for cumsum).
raise NotImplementedError("masked instances cannot yet deal with "
"'reduceat' or 'at'.")
if out_unmasked is not None:
kwargs['out'] = out_unmasked
result = getattr(ufunc, method)(*unmasked, **kwargs)
if result is None: # pragma: no cover
# This happens for the "at" method.
return result
if out is not None and len(out) == 1:
out = out[0]
return self._masked_result(result, mask, out)
def __array_function__(self, function, types, args, kwargs):
# TODO: go through functions systematically to see which ones
# work and/or can be supported.
if function in MASKED_SAFE_FUNCTIONS:
return super().__array_function__(function, types, args, kwargs)
elif function in APPLY_TO_BOTH_FUNCTIONS:
helper = APPLY_TO_BOTH_FUNCTIONS[function]
try:
helper_result = helper(*args, **kwargs)
except NotImplementedError:
return self._not_implemented_or_raise(function, types)
data_args, mask_args, kwargs, out = helper_result
if out is not None:
if not isinstance(out, Masked):
return self._not_implemented_or_raise(function, types)
function(*mask_args, out=out.mask, **kwargs)
function(*data_args, out=out.unmasked, **kwargs)
return out
mask = function(*mask_args, **kwargs)
result = function(*data_args, **kwargs)
elif function in DISPATCHED_FUNCTIONS:
dispatched_function = DISPATCHED_FUNCTIONS[function]
try:
dispatched_result = dispatched_function(*args, **kwargs)
except NotImplementedError:
return self._not_implemented_or_raise(function, types)
if not isinstance(dispatched_result, tuple):
return dispatched_result
result, mask, out = dispatched_result
elif function in UNSUPPORTED_FUNCTIONS:
return NotImplemented
else: # pragma: no cover
# By default, just pass it through for now.
return super().__array_function__(function, types, args, kwargs)
if mask is None:
return result
else:
return self._masked_result(result, mask, out)
def _not_implemented_or_raise(self, function, types):
# Our function helper or dispatcher found that the function does not
# work with Masked. In principle, there may be another class that
# knows what to do with us, for which we should return NotImplemented.
# But if there is ndarray (or a non-Masked subclass of it) around,
# it quite likely coerces, so we should just break.
if any(issubclass(t, np.ndarray) and not issubclass(t, Masked)
for t in types):
raise TypeError("the MaskedNDArray implementation cannot handle {} "
"with the given arguments."
.format(function)) from None
else:
return NotImplemented
def _masked_result(self, result, mask, out):
if isinstance(result, tuple):
if out is None:
out = (None,) * len(result)
if not isinstance(mask, (list, tuple)):
mask = (mask,) * len(result)
return tuple(self._masked_result(result_, mask_, out_)
for (result_, mask_, out_) in zip(result, mask, out))
if out is None:
# Note that we cannot count on result being the same class as
# 'self' (e.g., comparison of quantity results in an ndarray, most
# operations on Longitude and Latitude result in Angle or
# Quantity), so use Masked to determine the appropriate class.
return Masked(result, mask)
# TODO: remove this sanity check once test cases are more complete.
assert isinstance(out, Masked)
# If we have an output, the result was written in-place, so we should
# also write the mask in-place (if not done already in the code).
if out._mask is not mask:
out._mask[...] = mask
return out
# Below are ndarray methods that need to be overridden as masked elements
# need to be skipped and/or an initial value needs to be set.
def _reduce_defaults(self, kwargs, initial_func=None):
"""Get default where and initial for masked reductions.
Generally, the default should be to skip all masked elements. For
reductions such as np.minimum.reduce, we also need an initial value,
which can be determined using ``initial_func``.
"""
if 'where' not in kwargs:
kwargs['where'] = ~self.mask
if initial_func is not None and 'initial' not in kwargs:
kwargs['initial'] = initial_func(self.unmasked)
return kwargs
def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None):
# Unfortunately, cannot override the call to diagonal inside trace, so
# duplicate implementation in numpy/core/src/multiarray/calculation.c.
diagonal = self.diagonal(offset=offset, axis1=axis1, axis2=axis2)
return diagonal.sum(-1, dtype=dtype, out=out)
def min(self, axis=None, out=None, **kwargs):
return super().min(axis=axis, out=out,
**self._reduce_defaults(kwargs, np.nanmax))
def max(self, axis=None, out=None, **kwargs):
return super().max(axis=axis, out=out,
**self._reduce_defaults(kwargs, np.nanmin))
def nonzero(self):
unmasked_nonzero = self.unmasked.nonzero()
if self.ndim >= 1:
not_masked = ~self.mask[unmasked_nonzero]
return tuple(u[not_masked] for u in unmasked_nonzero)
else:
return unmasked_nonzero if not self.mask else np.nonzero(0)
def compress(self, condition, axis=None, out=None):
if out is not None:
raise NotImplementedError('cannot yet give output')
return self._apply('compress', condition, axis=axis)
def repeat(self, repeats, axis=None):
return self._apply('repeat', repeats, axis=axis)
def choose(self, choices, out=None, mode='raise'):
# Let __array_function__ take care since choices can be masked too.
return np.choose(self, choices, out=out, mode=mode)
def argmin(self, axis=None, out=None):
# Todo: should this return a masked integer array, with masks
# if all elements were masked?
at_min = self == self.min(axis=axis, keepdims=True)
return at_min.filled(False).argmax(axis=axis, out=out)
def argmax(self, axis=None, out=None):
at_max = self == self.max(axis=axis, keepdims=True)
return at_max.filled(False).argmax(axis=axis, out=out)
def argsort(self, axis=-1, kind=None, order=None):
"""Returns the indices that would sort an array.
Perform an indirect sort along the given axis on both the array
and the mask, with masked items being sorted to the end.
Parameters
----------
axis : int or None, optional
Axis along which to sort. The default is -1 (the last axis).
If None, the flattened array is used.
kind : str or None, ignored.
The kind of sort. Present only to allow subclasses to work.
order : str or list of str.
For an array with fields defined, the fields to compare first,
second, etc. A single field can be specified as a string, and not
all fields need be specified, but unspecified fields will still be
used, in dtype order, to break ties.
Returns
-------
index_array : ndarray, int
Array of indices that sorts along the specified ``axis``. Use
``np.take_along_axis(self, index_array, axis=axis)`` to obtain
the sorted array.
"""
if axis is None:
data = self.ravel()
axis = -1
else:
data = self
if self.dtype.names:
# As done inside the argsort implementation in multiarray/methods.c.
if order is None:
order = self.dtype.names
else:
order = np.core._internal._newnames(self.dtype, order)
keys = tuple(data[name] for name in order[::-1])
elif order is not None:
raise ValueError('Cannot specify order when the array has no fields.')
else:
keys = (data,)
return np.lexsort(keys, axis=axis)
def sort(self, axis=-1, kind=None, order=None):
"""Sort an array in-place. Refer to `numpy.sort` for full documentation."""
# TODO: probably possible to do this faster than going through argsort!
indices = self.argsort(axis, kind=kind, order=order)
self[:] = np.take_along_axis(self, indices, axis=axis)
def argpartition(self, kth, axis=-1, kind='introselect', order=None):
# TODO: should be possible to do this faster than with a full argsort!
return self.argsort(axis=axis, order=order)
def partition(self, kth, axis=-1, kind='introselect', order=None):
# TODO: should be possible to do this faster than with a full argsort!
        return self.sort(axis=axis, order=order)
def cumsum(self, axis=None, dtype=None, out=None):
if axis is None:
self = self.ravel()
axis = 0
return np.add.accumulate(self, axis=axis, dtype=dtype, out=out)
def cumprod(self, axis=None, dtype=None, out=None):
if axis is None:
self = self.ravel()
axis = 0
return np.multiply.accumulate(self, axis=axis, dtype=dtype, out=out)
def clip(self, min=None, max=None, out=None, **kwargs):
"""Return an array whose values are limited to ``[min, max]``.
Like `~numpy.clip`, but any masked values in ``min`` and ``max``
are ignored for clipping. The mask of the input array is propagated.
"""
# TODO: implement this at the ufunc level.
dmin, mmin = self._get_data_and_mask(min)
dmax, mmax = self._get_data_and_mask(max)
if mmin is None and mmax is None:
# Fast path for unmasked max, min.
return super().clip(min, max, out=out, **kwargs)
masked_out = np.positive(self, out=out)
out = masked_out.unmasked
if dmin is not None:
np.maximum(out, dmin, out=out, where=True if mmin is None else ~mmin)
if dmax is not None:
np.minimum(out, dmax, out=out, where=True if mmax is None else ~mmax)
return masked_out
def mean(self, axis=None, dtype=None, out=None, keepdims=False):
# Implementation based on that in numpy/core/_methods.py
# Cast bool, unsigned int, and int to float64 by default,
# and do float16 at higher precision.
is_float16_result = False
if dtype is None:
if issubclass(self.dtype.type, (np.integer, np.bool_)):
dtype = np.dtype('f8')
elif issubclass(self.dtype.type, np.float16):
dtype = np.dtype('f4')
is_float16_result = out is None
result = self.sum(axis=axis, dtype=dtype, out=out,
keepdims=keepdims, where=~self.mask)
n = np.add.reduce(~self.mask, axis=axis, keepdims=keepdims)
result /= n
if is_float16_result:
result = result.astype(self.dtype)
return result
def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
# Simplified implementation based on that in numpy/core/_methods.py
n = np.add.reduce(~self.mask, axis=axis, keepdims=keepdims)[...]
# Cast bool, unsigned int, and int to float64 by default.
if dtype is None and issubclass(self.dtype.type,
(np.integer, np.bool_)):
dtype = np.dtype('f8')
mean = self.mean(axis=axis, dtype=dtype, keepdims=True)
x = self - mean
x *= x.conjugate() # Conjugate just returns x if not complex.
result = x.sum(axis=axis, dtype=dtype, out=out,
keepdims=keepdims, where=~x.mask)
n -= ddof
n = np.maximum(n, 0, out=n)
result /= n
result._mask |= (n == 0)
return result
def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False):
result = self.var(axis=axis, dtype=dtype, out=out, ddof=ddof,
keepdims=keepdims)
return np.sqrt(result, out=result)
def __bool__(self):
# First get result from array itself; this will error if not a scalar.
result = super().__bool__()
return result and not self.mask
def any(self, axis=None, out=None, keepdims=False):
return np.logical_or.reduce(self, axis=axis, out=out,
keepdims=keepdims, where=~self.mask)
def all(self, axis=None, out=None, keepdims=False):
return np.logical_and.reduce(self, axis=axis, out=out,
keepdims=keepdims, where=~self.mask)
# Following overrides needed since somehow the ndarray implementation
# does not actually call these.
def __str__(self):
return np.array_str(self)
def __repr__(self):
return np.array_repr(self)
def __format__(self, format_spec):
string = super().__format__(format_spec)
if self.shape == () and self.mask:
n = min(3, max(1, len(string)))
return ' ' * (len(string)-n) + '\u2014' * n
else:
return string
class MaskedRecarray(np.recarray, MaskedNDArray, data_cls=np.recarray):
# Explicit definition since we need to override some methods.
def __array_finalize__(self, obj):
# recarray.__array_finalize__ does not do super, so we do it
# explicitly.
super().__array_finalize__(obj)
super(np.recarray, self).__array_finalize__(obj)
# __getattribute__, __setattr__, and field use these somewhat
    # obscure ndarray methods.  TODO: override in MaskedNDArray?
def getfield(self, dtype, offset=0):
for field, info in self.dtype.fields.items():
if offset == info[1] and dtype == info[0]:
return self[field]
raise NotImplementedError('can only get existing field from '
'structured dtype.')
def setfield(self, val, dtype, offset=0):
for field, info in self.dtype.fields.items():
if offset == info[1] and dtype == info[0]:
self[field] = val
return
raise NotImplementedError('can only set existing field from '
'structured dtype.')
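# A minimal usage sketch (an illustrative addition, not part of the original
# module). It only exercises behaviour documented above -- the Masked factory,
# mask propagation through ufuncs, filling, and mask-aware reductions -- and
# assumes the module's own imports resolve when it is run directly.
if __name__ == '__main__':
    a = Masked(np.arange(4.), mask=[False, True, False, False])
    print(Masked(np.ndarray) is MaskedNDArray)  # factory mode returns the masked class
    print(a.unmasked)        # the raw data, without the mask
    print(a.filled(-1.))     # masked entries replaced by -1.0
    print((a + 1).mask)      # the mask propagates through ufuncs
    print(a.mean())          # mean() skips masked elements
    print(a.sum())           # sum() propagates the mask, so the result is masked
    print(a.argsort())       # masked items are sorted to the end (see argsort above)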
| pllim/astropy | astropy/utils/masked/core.py | Python | bsd-3-clause | 45,844 |
import json
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
class TestSubmitService(TestCase):
fixtures = ['test_users', 'test_contest', 'test_full_package',
'test_problem_instance', 'test_messages', 'test_templates',
'test_submitservice']
def test_submit(self):
ufile = SimpleUploadedFile('file.cpp', "int main() {}")
url = reverse('oioioi.submitservice.views.submit_view',
kwargs={'contest_id': 'c'})
response = self.client.post(url, {
'file': ufile,
'task': 'zad1',
'token': '123456ABCDEF'
})
response_data = json.loads(response.content)
self.assertEqual(response_data['result_url'], '/c/c/s/1/')
def test_view_user_token(self):
url = reverse('oioioi.submitservice.views.view_user_token',
kwargs={'contest_id': 'c'})
self.client.login(username='test_user')
response = self.client.get(url)
self.assertIn('123456ABCDEF', response.content)
def test_clear_user_token(self):
url = reverse('oioioi.submitservice.views.clear_user_token',
kwargs={'contest_id': 'c'})
self.client.login(username='test_user')
self.client.post(url)
url = reverse('oioioi.submitservice.views.view_user_token',
kwargs={'contest_id': 'c'})
response = self.client.get(url)
self.assertNotIn('123456ABCDEF', response.content)
| papedaniel/oioioi | oioioi/submitservice/tests.py | Python | gpl-3.0 | 1,593 |
from RMPY.rig import rigSingleJoint
from RMPY.rig import rigBase
import pymel.core as pm
class RigPropModel(rigBase.BaseModel):
def __init__(self):
super(RigPropModel, self).__init__()
self.single_joints = []
class RigProp(rigBase.RigBase):
def __init__(self, *args, **kwargs):
super(RigProp, self).__init__(*args, **kwargs)
self._model = RigPropModel()
@property
def single_joints(self):
return self._model.single_joints
def create_point_base(self, *points, **kwargs):
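        """Create layered prop controls (a descriptive note added for clarity).
        Inferred from the code below: a single RigSingleJoint is built and, for
        each reference point, ``depth`` stacked controls are created, each
        ``size_step`` larger than the previous and parented under it; when
        ``offset_visibility`` is True the first control gains an
        ``offset_visibility`` attribute that drives the visibility of the nested
        offset controls.  Other keyword arguments are forwarded to
        RigSingleJoint.create_point_base.
        """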
depth = kwargs.pop('depth', 2)
size = kwargs.pop('size', 1.0)
size_step = kwargs.pop('size_step', size/10.0)
offset_visibility = kwargs.pop('offset_visibility', False)
single_joint = rigSingleJoint.RigSingleJoint()
self.single_joints.append(single_joint)
for each_point in points:
for index in range(depth):
single_joint.create_point_base(each_point, size=size + size_step*index, **kwargs)
if index >= 1:
single_joint.reset_controls[-1].setParent(single_joint.controls[-2])
if offset_visibility:
single_joint.controls[0].offset_visibility >> single_joint.reset_controls[-1].visibility
else:
if offset_visibility:
single_joint.controls[0].addAttr('offset_visibility', at='bool', k=True)
# self.controls = single_joint.controls
# self.reset_controls = single_joint.reset_controls
# self.joints = single_joint.joints
# self.reset_joints = single_joint.reset_joints
if __name__ == '__main__':
reference_points = pm.ls('C_Hip01_reference_pnt')[0]
rig_prop = RigProp()
    rig_prop.create_point_base(reference_points, type='box', centered=True)
| rendermotion/RMMel | rig/rigProp.py | Python | lgpl-3.0 | 1,843
"""Tests for HTMLParser.py."""
import html.parser
import pprint
import unittest
from test import support
class EventCollector(html.parser.HTMLParser):
def __init__(self, *args, **kw):
self.events = []
self.append = self.events.append
html.parser.HTMLParser.__init__(self, *args, **kw)
def get_events(self):
# Normalize the list of events so that buffer artefacts don't
# separate runs of contiguous characters.
L = []
prevtype = None
for event in self.events:
type = event[0]
if type == prevtype == "data":
L[-1] = ("data", L[-1][1] + event[1])
else:
L.append(event)
prevtype = type
self.events = L
return L
# structure markup
def handle_starttag(self, tag, attrs):
self.append(("starttag", tag, attrs))
def handle_startendtag(self, tag, attrs):
self.append(("startendtag", tag, attrs))
def handle_endtag(self, tag):
self.append(("endtag", tag))
# all other markup
def handle_comment(self, data):
self.append(("comment", data))
def handle_charref(self, data):
self.append(("charref", data))
def handle_data(self, data):
self.append(("data", data))
def handle_decl(self, data):
self.append(("decl", data))
def handle_entityref(self, data):
self.append(("entityref", data))
def handle_pi(self, data):
self.append(("pi", data))
def unknown_decl(self, decl):
self.append(("unknown decl", decl))
class EventCollectorExtra(EventCollector):
def handle_starttag(self, tag, attrs):
EventCollector.handle_starttag(self, tag, attrs)
self.append(("starttag_text", self.get_starttag_text()))
class TestCaseBase(unittest.TestCase):
def _run_check(self, source, expected_events, collector=None):
if collector is None:
collector = EventCollector()
parser = collector
for s in source:
parser.feed(s)
parser.close()
events = parser.get_events()
if events != expected_events:
self.fail("received events did not match expected events\n"
"Expected:\n" + pprint.pformat(expected_events) +
"\nReceived:\n" + pprint.pformat(events))
def _run_check_extra(self, source, events):
self._run_check(source, events, EventCollectorExtra())
def _parse_error(self, source):
def parse(source=source):
parser = html.parser.HTMLParser()
parser.feed(source)
parser.close()
self.assertRaises(html.parser.HTMLParseError, parse)
class HTMLParserTestCase(TestCaseBase):
def test_processing_instruction_only(self):
self._run_check("<?processing instruction>", [
("pi", "processing instruction"),
])
self._run_check("<?processing instruction ?>", [
("pi", "processing instruction ?"),
])
def test_simple_html(self):
self._run_check("""
<!DOCTYPE html PUBLIC 'foo'>
<HTML>&entity; 
<!--comment1a
-></foo><bar><<?pi?></foo<bar
comment1b-->
<Img sRc='Bar' isMAP>sample
text
“
<!--comment2a-- --comment2b--><!>
</Html>
""", [
("data", "\n"),
("decl", "DOCTYPE html PUBLIC 'foo'"),
("data", "\n"),
("starttag", "html", []),
("entityref", "entity"),
("charref", "32"),
("data", "\n"),
("comment", "comment1a\n-></foo><bar><<?pi?></foo<bar\ncomment1b"),
("data", "\n"),
("starttag", "img", [("src", "Bar"), ("ismap", None)]),
("data", "sample\ntext\n"),
("charref", "x201C"),
("data", "\n"),
("comment", "comment2a-- --comment2b"),
("data", "\n"),
("endtag", "html"),
("data", "\n"),
])
def test_malformatted_charref(self):
self._run_check("<p>&#bad;</p>", [
("starttag", "p", []),
("data", "&#bad;"),
("endtag", "p"),
])
def test_unclosed_entityref(self):
self._run_check("&entityref foo", [
("entityref", "entityref"),
("data", " foo"),
])
def test_doctype_decl(self):
inside = """\
DOCTYPE html [
<!ELEMENT html - O EMPTY>
<!ATTLIST html
version CDATA #IMPLIED
profile CDATA 'DublinCore'>
<!NOTATION datatype SYSTEM 'http://xml.python.org/notations/python-module'>
<!ENTITY myEntity 'internal parsed entity'>
<!ENTITY anEntity SYSTEM 'http://xml.python.org/entities/something.xml'>
<!ENTITY % paramEntity 'name|name|name'>
%paramEntity;
<!-- comment -->
]"""
self._run_check("<!%s>" % inside, [
("decl", inside),
])
def test_bad_nesting(self):
# Strangely, this *is* supposed to test that overlapping
# elements are allowed. HTMLParser is more geared toward
        # lexing the input than parsing the structure.
self._run_check("<a><b></a></b>", [
("starttag", "a", []),
("starttag", "b", []),
("endtag", "a"),
("endtag", "b"),
])
def test_bare_ampersands(self):
self._run_check("this text & contains & ampersands &", [
("data", "this text & contains & ampersands &"),
])
def test_bare_pointy_brackets(self):
self._run_check("this < text > contains < bare>pointy< brackets", [
("data", "this < text > contains < bare>pointy< brackets"),
])
def test_attr_syntax(self):
output = [
("starttag", "a", [("b", "v"), ("c", "v"), ("d", "v"), ("e", None)])
]
self._run_check("""<a b='v' c="v" d=v e>""", output)
self._run_check("""<a b = 'v' c = "v" d = v e>""", output)
self._run_check("""<a\nb\n=\n'v'\nc\n=\n"v"\nd\n=\nv\ne>""", output)
self._run_check("""<a\tb\t=\t'v'\tc\t=\t"v"\td\t=\tv\te>""", output)
def test_attr_values(self):
self._run_check("""<a b='xxx\n\txxx' c="yyy\t\nyyy" d='\txyz\n'>""",
[("starttag", "a", [("b", "xxx\n\txxx"),
("c", "yyy\t\nyyy"),
("d", "\txyz\n")])
])
self._run_check("""<a b='' c="">""", [
("starttag", "a", [("b", ""), ("c", "")]),
])
# Regression test for SF patch #669683.
self._run_check("<e a=rgb(1,2,3)>", [
("starttag", "e", [("a", "rgb(1,2,3)")]),
])
# Regression test for SF bug #921657.
self._run_check("<a href=mailto:[email protected]>", [
("starttag", "a", [("href", "mailto:[email protected]")]),
])
def test_attr_nonascii(self):
# see issue 7311
self._run_check("<img src=/foo/bar.png alt=\u4e2d\u6587>", [
("starttag", "img", [("src", "/foo/bar.png"),
("alt", "\u4e2d\u6587")]),
])
self._run_check("<a title='\u30c6\u30b9\u30c8' "
"href='\u30c6\u30b9\u30c8.html'>", [
("starttag", "a", [("title", "\u30c6\u30b9\u30c8"),
("href", "\u30c6\u30b9\u30c8.html")]),
])
self._run_check('<a title="\u30c6\u30b9\u30c8" '
'href="\u30c6\u30b9\u30c8.html">', [
("starttag", "a", [("title", "\u30c6\u30b9\u30c8"),
("href", "\u30c6\u30b9\u30c8.html")]),
])
def test_attr_entity_replacement(self):
self._run_check("""<a b='&><"''>""", [
("starttag", "a", [("b", "&><\"'")]),
])
def test_attr_funky_names(self):
self._run_check("""<a a.b='v' c:d=v e-f=v>""", [
("starttag", "a", [("a.b", "v"), ("c:d", "v"), ("e-f", "v")]),
])
def test_illegal_declarations(self):
self._parse_error('<!spacer type="block" height="25">')
def test_starttag_end_boundary(self):
self._run_check("""<a b='<'>""", [("starttag", "a", [("b", "<")])])
self._run_check("""<a b='>'>""", [("starttag", "a", [("b", ">")])])
def test_buffer_artefacts(self):
output = [("starttag", "a", [("b", "<")])]
self._run_check(["<a b='<'>"], output)
self._run_check(["<a ", "b='<'>"], output)
self._run_check(["<a b", "='<'>"], output)
self._run_check(["<a b=", "'<'>"], output)
self._run_check(["<a b='<", "'>"], output)
self._run_check(["<a b='<'", ">"], output)
output = [("starttag", "a", [("b", ">")])]
self._run_check(["<a b='>'>"], output)
self._run_check(["<a ", "b='>'>"], output)
self._run_check(["<a b", "='>'>"], output)
self._run_check(["<a b=", "'>'>"], output)
self._run_check(["<a b='>", "'>"], output)
self._run_check(["<a b='>'", ">"], output)
output = [("comment", "abc")]
self._run_check(["", "<!--abc-->"], output)
self._run_check(["<", "!--abc-->"], output)
self._run_check(["<!", "--abc-->"], output)
self._run_check(["<!-", "-abc-->"], output)
self._run_check(["<!--", "abc-->"], output)
self._run_check(["<!--a", "bc-->"], output)
self._run_check(["<!--ab", "c-->"], output)
self._run_check(["<!--abc", "-->"], output)
self._run_check(["<!--abc-", "->"], output)
self._run_check(["<!--abc--", ">"], output)
self._run_check(["<!--abc-->", ""], output)
def test_starttag_junk_chars(self):
self._parse_error("</>")
self._parse_error("</$>")
self._parse_error("</")
self._parse_error("</a")
self._parse_error("<a<a>")
self._parse_error("</a<a>")
self._parse_error("<!")
self._parse_error("<a $>")
self._parse_error("<a")
self._parse_error("<a foo='bar'")
self._parse_error("<a foo='bar")
self._parse_error("<a foo='>'")
self._parse_error("<a foo='>")
self._parse_error("<a foo=>")
def test_declaration_junk_chars(self):
self._parse_error("<!DOCTYPE foo $ >")
def test_startendtag(self):
self._run_check("<p/>", [
("startendtag", "p", []),
])
self._run_check("<p></p>", [
("starttag", "p", []),
("endtag", "p"),
])
self._run_check("<p><img src='foo' /></p>", [
("starttag", "p", []),
("startendtag", "img", [("src", "foo")]),
("endtag", "p"),
])
def test_get_starttag_text(self):
s = """<foo:bar \n one="1"\ttwo=2 >"""
self._run_check_extra(s, [
("starttag", "foo:bar", [("one", "1"), ("two", "2")]),
("starttag_text", s)])
def test_cdata_content(self):
s = """<script> <!-- not a comment --> ¬-an-entity-ref; </script>"""
self._run_check(s, [
("starttag", "script", []),
("data", " <!-- not a comment --> ¬-an-entity-ref; "),
("endtag", "script"),
])
s = """<script> <not a='start tag'> </script>"""
self._run_check(s, [
("starttag", "script", []),
("data", " <not a='start tag'> "),
("endtag", "script"),
])
def test_entityrefs_in_attributes(self):
self._run_check("<html foo='€&aa&unsupported;'>", [
("starttag", "html", [("foo", "\u20AC&aa&unsupported;")])
])
class HTMLParserTolerantTestCase(TestCaseBase):
def setUp(self):
self.collector = EventCollector(strict=False)
def test_tolerant_parsing(self):
self._run_check('<html <html>te>>xt&a<<bc</a></html>\n'
'<img src="URL><//img></html</html>', [
('data', '<html '),
('starttag', 'html', []),
('data', 'te>>xt'),
('entityref', 'a'),
('data', '<<bc'),
('endtag', 'a'),
('endtag', 'html'),
('data', '\n<img src="URL><//img></html'),
('endtag', 'html')],
collector = self.collector)
def test_comma_between_attributes(self):
self._run_check('<form action="/xxx.php?a=1&b=2&", '
'method="post">', [
('starttag', 'form',
[('action', '/xxx.php?a=1&b=2&'),
('method', 'post')])],
collector = self.collector)
def test_weird_chars_in_unquoted_attribute_values(self):
self._run_check('<form action=bogus|&#()value>', [
('starttag', 'form',
[('action', 'bogus|&#()value')])],
collector = self.collector)
def test_unescape_function(self):
p = html.parser.HTMLParser()
self.assertEqual(p.unescape('&#bad;'),'&#bad;')
self.assertEqual(p.unescape('&'),'&')
def test_main():
support.run_unittest(HTMLParserTestCase, HTMLParserTolerantTestCase)
if __name__ == "__main__":
test_main()
| invisiblek/python-for-android | python3-alpha/python3-src/Lib/test/test_htmlparser.py | Python | apache-2.0 | 13,425 |
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for nova data.
"""
from oslo.config import cfg
from oslo.db.sqlalchemy import models
from sqlalchemy import Column, Index, Integer, BigInteger, Enum, String, schema
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import orm
from sqlalchemy import ForeignKey, DateTime, Boolean, Text, Float
from nova.db.sqlalchemy import types
from nova.openstack.common import timeutils
CONF = cfg.CONF
BASE = declarative_base()
def MediumText():
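    """Use MEDIUMTEXT on MySQL and a plain TEXT column on other backends."""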
return Text().with_variant(MEDIUMTEXT(), 'mysql')
class NovaBase(models.SoftDeleteMixin,
models.TimestampMixin,
models.ModelBase):
metadata = None
# TODO(ekudryashova): remove this after both nova and oslo.db
# will use oslo.utils library
# NOTE: Both projects(nova and oslo.db) use `timeutils.utcnow`, which
# returns specified time(if override_time is set). Time overriding is
# only used by unit tests, but in a lot of places, temporarily overriding
# this columns helps to avoid lots of calls of timeutils.set_override
# from different places in unit tests.
created_at = Column(DateTime, default=lambda: timeutils.utcnow())
updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow())
def save(self, session=None):
from nova.db.sqlalchemy import api
if session is None:
session = api.get_session()
super(NovaBase, self).save(session=session)
class Service(BASE, NovaBase):
"""Represents a running service on a host."""
__tablename__ = 'services'
__table_args__ = (
schema.UniqueConstraint("host", "topic", "deleted",
name="uniq_services0host0topic0deleted"),
schema.UniqueConstraint("host", "binary", "deleted",
name="uniq_services0host0binary0deleted")
)
id = Column(Integer, primary_key=True)
host = Column(String(255)) # , ForeignKey('hosts.id'))
binary = Column(String(255))
topic = Column(String(255))
report_count = Column(Integer, nullable=False, default=0)
disabled = Column(Boolean, default=False)
disabled_reason = Column(String(255))
class ComputeNode(BASE, NovaBase):
"""Represents a running compute service on a host."""
__tablename__ = 'compute_nodes'
__table_args__ = ()
id = Column(Integer, primary_key=True)
service_id = Column(Integer, ForeignKey('services.id'), nullable=False)
service = orm.relationship(Service,
backref=orm.backref('compute_node'),
foreign_keys=service_id,
primaryjoin='and_('
'ComputeNode.service_id == Service.id,'
'ComputeNode.deleted == 0)')
vcpus = Column(Integer, nullable=False)
memory_mb = Column(Integer, nullable=False)
local_gb = Column(Integer, nullable=False)
vcpus_used = Column(Integer, nullable=False)
memory_mb_used = Column(Integer, nullable=False)
local_gb_used = Column(Integer, nullable=False)
hypervisor_type = Column(MediumText(), nullable=False)
hypervisor_version = Column(Integer, nullable=False)
hypervisor_hostname = Column(String(255))
    # Free RAM, amount of activity (resize, migration, boot, etc.) and
    # the number of running VMs are a good starting point for what's
# important when making scheduling decisions.
free_ram_mb = Column(Integer)
free_disk_gb = Column(Integer)
current_workload = Column(Integer)
running_vms = Column(Integer)
# Note(masumotok): Expected Strings example:
#
# '{"arch":"x86_64",
# "model":"Nehalem",
# "topology":{"sockets":1, "threads":2, "cores":3},
# "features":["tdtscp", "xtpr"]}'
#
# Points are "json translatable" and it must have all dictionary keys
# above, since it is copied from <cpu> tag of getCapabilities()
# (See libvirt.virtConnection).
cpu_info = Column(MediumText(), nullable=False)
disk_available_least = Column(Integer)
host_ip = Column(types.IPAddress())
supported_instances = Column(Text)
metrics = Column(Text)
# Note(yongli): json string PCI Stats
# '{"vendor_id":"8086", "product_id":"1234", "count":3 }'
pci_stats = Column(Text)
# extra_resources is a json string containing arbitrary
# data about additional resources.
extra_resources = Column(Text)
# json-encode string containing compute node statistics
stats = Column(Text, default='{}')
# json-encoded dict that contains NUMA topology as generated by
# nova.virt.hardware.VirtNUMAHostTopology.to_json()
numa_topology = Column(Text)
class Certificate(BASE, NovaBase):
"""Represents a x509 certificate."""
__tablename__ = 'certificates'
__table_args__ = (
Index('certificates_project_id_deleted_idx', 'project_id', 'deleted'),
Index('certificates_user_id_deleted_idx', 'user_id', 'deleted')
)
id = Column(Integer, primary_key=True)
user_id = Column(String(255))
project_id = Column(String(255))
file_name = Column(String(255))
class Instance(BASE, NovaBase):
"""Represents a guest VM."""
__tablename__ = 'instances'
__table_args__ = (
Index('uuid', 'uuid', unique=True),
Index('project_id', 'project_id'),
Index('instances_host_deleted_idx',
'host', 'deleted'),
Index('instances_reservation_id_idx',
'reservation_id'),
Index('instances_terminated_at_launched_at_idx',
'terminated_at', 'launched_at'),
Index('instances_uuid_deleted_idx',
'uuid', 'deleted'),
Index('instances_task_state_updated_at_idx',
'task_state', 'updated_at'),
Index('instances_host_node_deleted_idx',
'host', 'node', 'deleted'),
Index('instances_host_deleted_cleaned_idx',
'host', 'deleted', 'cleaned'),
)
injected_files = []
id = Column(Integer, primary_key=True, autoincrement=True)
@property
def name(self):
try:
base_name = CONF.instance_name_template % self.id
except TypeError:
# Support templates like "uuid-%(uuid)s", etc.
info = {}
# NOTE(russellb): Don't use self.iteritems() here, as it will
# result in infinite recursion on the name property.
for column in iter(orm.object_mapper(self).columns):
key = column.name
# prevent recursion if someone specifies %(name)s
# %(name)s will not be valid.
if key == 'name':
continue
info[key] = self[key]
try:
base_name = CONF.instance_name_template % info
except KeyError:
base_name = self.uuid
return base_name
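    # For illustration: with nova's usual default template 'instance-%08x', an
    # instance with id 42 is named 'instance-0000002a'; a template such as
    # 'instance-%(uuid)s' raises TypeError above and is filled from the column
    # values instead.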
@property
def _extra_keys(self):
return ['name']
user_id = Column(String(255))
project_id = Column(String(255))
image_ref = Column(String(255))
kernel_id = Column(String(255))
ramdisk_id = Column(String(255))
hostname = Column(String(255))
launch_index = Column(Integer)
key_name = Column(String(255))
key_data = Column(MediumText())
power_state = Column(Integer)
vm_state = Column(String(255))
task_state = Column(String(255))
memory_mb = Column(Integer)
vcpus = Column(Integer)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
ephemeral_key_uuid = Column(String(36))
# This is not related to hostname, above. It refers
# to the nova node.
host = Column(String(255)) # , ForeignKey('hosts.id'))
# To identify the "ComputeNode" which the instance resides in.
# This equals to ComputeNode.hypervisor_hostname.
node = Column(String(255))
# *not* flavorid, this is the internal primary_key
instance_type_id = Column(Integer)
user_data = Column(MediumText())
reservation_id = Column(String(255))
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
availability_zone = Column(String(255))
# User editable field for display in user-facing UIs
display_name = Column(String(255))
display_description = Column(String(255))
# To remember on which host an instance booted.
# An instance may have moved to another host by live migration.
launched_on = Column(MediumText())
# NOTE(jdillaman): locked deprecated in favor of locked_by,
# to be removed in Icehouse
locked = Column(Boolean)
locked_by = Column(Enum('owner', 'admin'))
os_type = Column(String(255))
architecture = Column(String(255))
vm_mode = Column(String(255))
uuid = Column(String(36))
root_device_name = Column(String(255))
default_ephemeral_device = Column(String(255))
default_swap_device = Column(String(255))
config_drive = Column(String(255))
# User editable field meant to represent what ip should be used
# to connect to the instance
access_ip_v4 = Column(types.IPAddress())
access_ip_v6 = Column(types.IPAddress())
auto_disk_config = Column(Boolean())
progress = Column(Integer)
# EC2 instance_initiated_shutdown_terminate
# True: -> 'terminate'
# False: -> 'stop'
# Note(maoy): currently Nova will always stop instead of terminate
# no matter what the flag says. So we set the default to False.
shutdown_terminate = Column(Boolean(), default=False)
# EC2 disable_api_termination
disable_terminate = Column(Boolean(), default=False)
# OpenStack compute cell name. This will only be set at the top of
# the cells tree and it'll be a full cell name such as 'api!hop1!hop2'
cell_name = Column(String(255))
internal_id = Column(Integer)
# Records whether an instance has been deleted from disk
cleaned = Column(Integer, default=0)
class InstanceInfoCache(BASE, NovaBase):
"""Represents a cache of information about an instance
"""
__tablename__ = 'instance_info_caches'
__table_args__ = (
schema.UniqueConstraint(
"instance_uuid",
name="uniq_instance_info_caches0instance_uuid"),)
id = Column(Integer, primary_key=True, autoincrement=True)
# text column used for storing a json object of network data for api
network_info = Column(MediumText())
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False)
instance = orm.relationship(Instance,
backref=orm.backref('info_cache', uselist=False),
foreign_keys=instance_uuid,
primaryjoin=instance_uuid == Instance.uuid)
class InstanceExtra(BASE, NovaBase):
__tablename__ = 'instance_extra'
__table_args__ = (
Index('instance_extra_idx', 'instance_uuid'),)
id = Column(Integer, primary_key=True, autoincrement=True)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False)
numa_topology = Column(Text)
instance = orm.relationship(Instance,
backref=orm.backref('numa_topology',
uselist=False),
foreign_keys=instance_uuid,
primaryjoin=instance_uuid == Instance.uuid)
class InstanceTypes(BASE, NovaBase):
"""Represents possible flavors for instances.
Note: instance_type and flavor are synonyms and the term instance_type is
deprecated and in the process of being removed.
"""
__tablename__ = "instance_types"
__table_args__ = (
schema.UniqueConstraint("flavorid", "deleted",
name="uniq_instance_types0flavorid0deleted"),
schema.UniqueConstraint("name", "deleted",
name="uniq_instance_types0name0deleted")
)
# Internal only primary key/id
id = Column(Integer, primary_key=True)
name = Column(String(255))
memory_mb = Column(Integer, nullable=False)
vcpus = Column(Integer, nullable=False)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
# Public facing id will be renamed public_id
flavorid = Column(String(255))
swap = Column(Integer, nullable=False, default=0)
rxtx_factor = Column(Float, default=1)
vcpu_weight = Column(Integer)
disabled = Column(Boolean, default=False)
is_public = Column(Boolean, default=True)
class Volume(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'volumes'
__table_args__ = (
Index('volumes_instance_uuid_idx', 'instance_uuid'),
)
id = Column(String(36), primary_key=True, nullable=False)
deleted = Column(String(36), default="")
@property
def name(self):
return CONF.volume_name_template % self.id
ec2_id = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
snapshot_id = Column(String(36))
host = Column(String(255))
size = Column(Integer)
availability_zone = Column(String(255))
instance_uuid = Column(String(36))
mountpoint = Column(String(255))
attach_time = Column(DateTime)
status = Column(String(255)) # TODO(vish): enum?
attach_status = Column(String(255)) # TODO(vish): enum
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
display_name = Column(String(255))
display_description = Column(String(255))
provider_location = Column(String(256))
provider_auth = Column(String(256))
volume_type_id = Column(Integer)
class Quota(BASE, NovaBase):
"""Represents a single quota override for a project.
If there is no row for a given project id and resource, then the
default for the quota class is used. If there is no row for a
given quota class and resource, then the default for the
deployment is used. If the row is present but the hard limit is
Null, then the resource is unlimited.
"""
__tablename__ = 'quotas'
__table_args__ = (
schema.UniqueConstraint("project_id", "resource", "deleted",
name="uniq_quotas0project_id0resource0deleted"
),
)
id = Column(Integer, primary_key=True)
project_id = Column(String(255))
resource = Column(String(255), nullable=False)
hard_limit = Column(Integer)
class ProjectUserQuota(BASE, NovaBase):
"""Represents a single quota override for a user with in a project."""
__tablename__ = 'project_user_quotas'
uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted"
__table_args__ = (
schema.UniqueConstraint("user_id", "project_id", "resource", "deleted",
name=uniq_name),
Index('project_user_quotas_project_id_deleted_idx',
'project_id', 'deleted'),
Index('project_user_quotas_user_id_deleted_idx',
'user_id', 'deleted')
)
id = Column(Integer, primary_key=True, nullable=False)
project_id = Column(String(255), nullable=False)
user_id = Column(String(255), nullable=False)
resource = Column(String(255), nullable=False)
hard_limit = Column(Integer)
class QuotaClass(BASE, NovaBase):
"""Represents a single quota override for a quota class.
If there is no row for a given quota class and resource, then the
default for the deployment is used. If the row is present but the
hard limit is Null, then the resource is unlimited.
"""
__tablename__ = 'quota_classes'
__table_args__ = (
Index('ix_quota_classes_class_name', 'class_name'),
)
id = Column(Integer, primary_key=True)
class_name = Column(String(255))
resource = Column(String(255))
hard_limit = Column(Integer)
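# NOTE: illustrative sketch only, not part of the upstream schema. It shows the
# lookup order described by the Quota and QuotaClass docstrings above: a
# per-project override wins, then the quota-class default, then the deployment
# default; a hard_limit of None means the resource is unlimited.
def _example_effective_quota(project_quota, class_quota, deployment_default):
    for row in (project_quota, class_quota):
        if row is not None:
            return row.hard_limit  # may be None, i.e. unlimited
    return deployment_default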
class QuotaUsage(BASE, NovaBase):
"""Represents the current usage for a given resource."""
__tablename__ = 'quota_usages'
__table_args__ = (
Index('ix_quota_usages_project_id', 'project_id'),
)
id = Column(Integer, primary_key=True)
project_id = Column(String(255))
user_id = Column(String(255))
resource = Column(String(255), nullable=False)
in_use = Column(Integer, nullable=False)
reserved = Column(Integer, nullable=False)
@property
def total(self):
return self.in_use + self.reserved
until_refresh = Column(Integer)
class Reservation(BASE, NovaBase):
"""Represents a resource reservation for quotas."""
__tablename__ = 'reservations'
__table_args__ = (
Index('ix_reservations_project_id', 'project_id'),
Index('reservations_uuid_idx', 'uuid'),
Index('reservations_deleted_expire_idx', 'deleted', 'expire'),
)
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(String(36), nullable=False)
usage_id = Column(Integer, ForeignKey('quota_usages.id'), nullable=False)
project_id = Column(String(255))
user_id = Column(String(255))
resource = Column(String(255))
delta = Column(Integer, nullable=False)
expire = Column(DateTime)
usage = orm.relationship(
"QuotaUsage",
foreign_keys=usage_id,
primaryjoin='and_(Reservation.usage_id == QuotaUsage.id,'
'QuotaUsage.deleted == 0)')
class Snapshot(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'snapshots'
__table_args__ = ()
id = Column(String(36), primary_key=True, nullable=False)
deleted = Column(String(36), default="")
@property
def name(self):
return CONF.snapshot_name_template % self.id
@property
def volume_name(self):
return CONF.volume_name_template % self.volume_id
user_id = Column(String(255))
project_id = Column(String(255))
volume_id = Column(String(36), nullable=False)
status = Column(String(255))
progress = Column(String(255))
volume_size = Column(Integer)
scheduled_at = Column(DateTime)
display_name = Column(String(255))
display_description = Column(String(255))
class BlockDeviceMapping(BASE, NovaBase):
"""Represents block device mapping that is defined by EC2."""
__tablename__ = "block_device_mapping"
__table_args__ = (
Index('snapshot_id', 'snapshot_id'),
Index('volume_id', 'volume_id'),
Index('block_device_mapping_instance_uuid_device_name_idx',
'instance_uuid', 'device_name'),
Index('block_device_mapping_instance_uuid_volume_id_idx',
'instance_uuid', 'volume_id'),
Index('block_device_mapping_instance_uuid_idx', 'instance_uuid'),
# TODO(sshturm) Should be dropped. `virtual_name` was dropped
# in 186 migration,
# Duplicates `block_device_mapping_instance_uuid_device_name_idx`
# index.
Index("block_device_mapping_instance_uuid_virtual_name"
"_device_name_idx", 'instance_uuid', 'device_name'),
)
id = Column(Integer, primary_key=True, autoincrement=True)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
instance = orm.relationship(Instance,
backref=orm.backref('block_device_mapping'),
foreign_keys=instance_uuid,
primaryjoin='and_(BlockDeviceMapping.'
'instance_uuid=='
'Instance.uuid,'
'BlockDeviceMapping.deleted=='
'0)')
source_type = Column(String(255))
destination_type = Column(String(255))
guest_format = Column(String(255))
device_type = Column(String(255))
disk_bus = Column(String(255))
boot_index = Column(Integer)
device_name = Column(String(255))
    # default=False for compatibility with the existing code.
    # With the EC2 API, the default is True for a device specified by the
    # AMI and False for devices created at any other time.
# TODO(sshturm) add default in db
delete_on_termination = Column(Boolean, default=False)
snapshot_id = Column(String(36))
volume_id = Column(String(36))
volume_size = Column(Integer)
image_id = Column(String(36))
# for no device to suppress devices.
no_device = Column(Boolean)
connection_info = Column(MediumText())
class IscsiTarget(BASE, NovaBase):
"""Represents an iscsi target for a given host."""
__tablename__ = 'iscsi_targets'
__table_args__ = (
Index('iscsi_targets_volume_id_fkey', 'volume_id'),
Index('iscsi_targets_host_idx', 'host'),
Index('iscsi_targets_host_volume_id_deleted_idx', 'host', 'volume_id',
'deleted')
)
id = Column(Integer, primary_key=True, nullable=False)
target_num = Column(Integer)
host = Column(String(255))
volume_id = Column(String(36), ForeignKey('volumes.id'))
volume = orm.relationship(Volume,
backref=orm.backref('iscsi_target', uselist=False),
foreign_keys=volume_id,
primaryjoin='and_(IscsiTarget.volume_id==Volume.id,'
'IscsiTarget.deleted==0)')
class SecurityGroupInstanceAssociation(BASE, NovaBase):
__tablename__ = 'security_group_instance_association'
__table_args__ = (
Index('security_group_instance_association_instance_uuid_idx',
'instance_uuid'),
)
id = Column(Integer, primary_key=True, nullable=False)
security_group_id = Column(Integer, ForeignKey('security_groups.id'))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
class SecurityGroup(BASE, NovaBase):
"""Represents a security group."""
__tablename__ = 'security_groups'
__table_args__ = (
Index('uniq_security_groups0project_id0name0deleted', 'project_id',
'name', 'deleted'),
)
id = Column(Integer, primary_key=True)
name = Column(String(255))
description = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
instances = orm.relationship(Instance,
secondary="security_group_instance_association",
primaryjoin='and_('
'SecurityGroup.id == '
'SecurityGroupInstanceAssociation.security_group_id,'
'SecurityGroupInstanceAssociation.deleted == 0,'
'SecurityGroup.deleted == 0)',
secondaryjoin='and_('
'SecurityGroupInstanceAssociation.instance_uuid == Instance.uuid,'
# (anthony) the condition below shouldn't be necessary now that the
# association is being marked as deleted. However, removing this
# may cause existing deployments to choke, so I'm leaving it
'Instance.deleted == 0)',
backref='security_groups')
class SecurityGroupIngressRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'security_group_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True)
parent_group_id = Column(Integer, ForeignKey('security_groups.id'))
parent_group = orm.relationship("SecurityGroup", backref="rules",
foreign_keys=parent_group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.parent_group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == 0)')
protocol = Column(String(255))
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
# Note: This is not the parent SecurityGroup. It's SecurityGroup we're
# granting access for.
group_id = Column(Integer, ForeignKey('security_groups.id'))
grantee_group = orm.relationship("SecurityGroup",
foreign_keys=group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == 0)')
class SecurityGroupIngressDefaultRule(BASE, NovaBase):
__tablename__ = 'security_group_default_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
protocol = Column(String(5)) # "tcp", "udp" or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
class ProviderFirewallRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'provider_fw_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
protocol = Column(String(5)) # "tcp", "udp", or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
class KeyPair(BASE, NovaBase):
"""Represents a public key pair for ssh."""
__tablename__ = 'key_pairs'
__table_args__ = (
schema.UniqueConstraint("user_id", "name", "deleted",
name="uniq_key_pairs0user_id0name0deleted"),
)
id = Column(Integer, primary_key=True, nullable=False)
name = Column(String(255))
user_id = Column(String(255))
fingerprint = Column(String(255))
public_key = Column(MediumText())
class Migration(BASE, NovaBase):
"""Represents a running host-to-host migration."""
__tablename__ = 'migrations'
__table_args__ = (
Index('migrations_instance_uuid_and_status_idx', 'instance_uuid',
'status'),
Index('migrations_by_host_nodes_and_status_idx', 'deleted',
'source_compute', 'dest_compute', 'source_node', 'dest_node',
'status'),
)
id = Column(Integer, primary_key=True, nullable=False)
# NOTE(tr3buchet): the ____compute variables are instance['host']
source_compute = Column(String(255))
dest_compute = Column(String(255))
# nodes are equivalent to a compute node's 'hypervisor_hostname'
source_node = Column(String(255))
dest_node = Column(String(255))
# NOTE(tr3buchet): dest_host, btw, is an ip address
dest_host = Column(String(255))
old_instance_type_id = Column(Integer())
new_instance_type_id = Column(Integer())
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
# TODO(_cerberus_): enum
status = Column(String(255))
instance = orm.relationship("Instance", foreign_keys=instance_uuid,
primaryjoin='and_(Migration.instance_uuid == '
'Instance.uuid, Instance.deleted == '
'0)')
class Network(BASE, NovaBase):
"""Represents a network."""
__tablename__ = 'networks'
__table_args__ = (
schema.UniqueConstraint("vlan", "deleted",
name="uniq_networks0vlan0deleted"),
Index('networks_bridge_deleted_idx', 'bridge', 'deleted'),
Index('networks_host_idx', 'host'),
Index('networks_project_id_deleted_idx', 'project_id', 'deleted'),
Index('networks_uuid_project_id_deleted_idx', 'uuid',
'project_id', 'deleted'),
Index('networks_vlan_deleted_idx', 'vlan', 'deleted'),
Index('networks_cidr_v6_idx', 'cidr_v6')
)
id = Column(Integer, primary_key=True, nullable=False)
label = Column(String(255))
injected = Column(Boolean, default=False)
cidr = Column(types.CIDR())
cidr_v6 = Column(types.CIDR())
multi_host = Column(Boolean, default=False)
gateway_v6 = Column(types.IPAddress())
netmask_v6 = Column(types.IPAddress())
netmask = Column(types.IPAddress())
bridge = Column(String(255))
bridge_interface = Column(String(255))
gateway = Column(types.IPAddress())
broadcast = Column(types.IPAddress())
dns1 = Column(types.IPAddress())
dns2 = Column(types.IPAddress())
vlan = Column(Integer)
vpn_public_address = Column(types.IPAddress())
vpn_public_port = Column(Integer)
vpn_private_address = Column(types.IPAddress())
dhcp_start = Column(types.IPAddress())
rxtx_base = Column(Integer)
project_id = Column(String(255))
priority = Column(Integer)
host = Column(String(255)) # , ForeignKey('hosts.id'))
uuid = Column(String(36))
mtu = Column(Integer)
dhcp_server = Column(types.IPAddress())
enable_dhcp = Column(Boolean, default=True)
share_address = Column(Boolean, default=False)
class VirtualInterface(BASE, NovaBase):
"""Represents a virtual interface on an instance."""
__tablename__ = 'virtual_interfaces'
__table_args__ = (
schema.UniqueConstraint("address", "deleted",
name="uniq_virtual_interfaces0address0deleted"),
Index('network_id', 'network_id'),
Index('virtual_interfaces_instance_uuid_fkey', 'instance_uuid'),
)
id = Column(Integer, primary_key=True, nullable=False)
address = Column(String(255))
network_id = Column(Integer)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
uuid = Column(String(36))
# TODO(vish): can these both come from the same baseclass?
class FixedIp(BASE, NovaBase):
"""Represents a fixed ip for an instance."""
__tablename__ = 'fixed_ips'
__table_args__ = (
schema.UniqueConstraint(
"address", "deleted", name="uniq_fixed_ips0address0deleted"),
Index('fixed_ips_virtual_interface_id_fkey', 'virtual_interface_id'),
Index('network_id', 'network_id'),
Index('address', 'address'),
Index('fixed_ips_instance_uuid_fkey', 'instance_uuid'),
Index('fixed_ips_host_idx', 'host'),
Index('fixed_ips_network_id_host_deleted_idx', 'network_id', 'host',
'deleted'),
Index('fixed_ips_address_reserved_network_id_deleted_idx',
'address', 'reserved', 'network_id', 'deleted'),
Index('fixed_ips_deleted_allocated_idx', 'address', 'deleted',
'allocated')
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
network_id = Column(Integer)
virtual_interface_id = Column(Integer)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
# associated means that a fixed_ip has its instance_id column set
# allocated means that a fixed_ip has its virtual_interface_id column set
# TODO(sshturm) add default in db
allocated = Column(Boolean, default=False)
# leased means dhcp bridge has leased the ip
# TODO(sshturm) add default in db
leased = Column(Boolean, default=False)
# TODO(sshturm) add default in db
reserved = Column(Boolean, default=False)
host = Column(String(255))
network = orm.relationship(Network,
backref=orm.backref('fixed_ips'),
foreign_keys=network_id,
primaryjoin='and_('
'FixedIp.network_id == Network.id,'
'FixedIp.deleted == 0,'
'Network.deleted == 0)')
instance = orm.relationship(Instance,
foreign_keys=instance_uuid,
primaryjoin='and_('
'FixedIp.instance_uuid == Instance.uuid,'
'FixedIp.deleted == 0,'
'Instance.deleted == 0)')
class FloatingIp(BASE, NovaBase):
"""Represents a floating ip that dynamically forwards to a fixed ip."""
__tablename__ = 'floating_ips'
__table_args__ = (
schema.UniqueConstraint("address", "deleted",
name="uniq_floating_ips0address0deleted"),
Index('fixed_ip_id', 'fixed_ip_id'),
Index('floating_ips_host_idx', 'host'),
Index('floating_ips_project_id_idx', 'project_id'),
Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
'pool', 'deleted', 'fixed_ip_id', 'project_id')
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
fixed_ip_id = Column(Integer)
project_id = Column(String(255))
host = Column(String(255)) # , ForeignKey('hosts.id'))
auto_assigned = Column(Boolean, default=False)
# TODO(sshturm) add default in db
pool = Column(String(255))
interface = Column(String(255))
fixed_ip = orm.relationship(FixedIp,
backref=orm.backref('floating_ips'),
foreign_keys=fixed_ip_id,
primaryjoin='and_('
'FloatingIp.fixed_ip_id == FixedIp.id,'
'FloatingIp.deleted == 0,'
'FixedIp.deleted == 0)')
class DNSDomain(BASE, NovaBase):
"""Represents a DNS domain with availability zone or project info."""
__tablename__ = 'dns_domains'
__table_args__ = (
Index('project_id', 'project_id'),
Index('dns_domains_domain_deleted_idx', 'domain', 'deleted'),
)
deleted = Column(Boolean, default=False)
domain = Column(String(255), primary_key=True)
scope = Column(String(255))
availability_zone = Column(String(255))
project_id = Column(String(255))
class ConsolePool(BASE, NovaBase):
"""Represents pool of consoles on the same physical node."""
__tablename__ = 'console_pools'
__table_args__ = (
schema.UniqueConstraint(
"host", "console_type", "compute_host", "deleted",
name="uniq_console_pools0host0console_type0compute_host0deleted"),
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
username = Column(String(255))
password = Column(String(255))
console_type = Column(String(255))
public_hostname = Column(String(255))
host = Column(String(255))
compute_host = Column(String(255))
class Console(BASE, NovaBase):
"""Represents a console session for an instance."""
__tablename__ = 'consoles'
__table_args__ = (
Index('consoles_instance_uuid_idx', 'instance_uuid'),
)
id = Column(Integer, primary_key=True)
instance_name = Column(String(255))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
password = Column(String(255))
port = Column(Integer)
pool_id = Column(Integer, ForeignKey('console_pools.id'))
pool = orm.relationship(ConsolePool, backref=orm.backref('consoles'))
class InstanceMetadata(BASE, NovaBase):
"""Represents a user-provided metadata key/value pair for an instance."""
__tablename__ = 'instance_metadata'
__table_args__ = (
Index('instance_metadata_instance_uuid_idx', 'instance_uuid'),
)
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
instance = orm.relationship(Instance, backref="metadata",
foreign_keys=instance_uuid,
primaryjoin='and_('
'InstanceMetadata.instance_uuid == '
'Instance.uuid,'
'InstanceMetadata.deleted == 0)')
class InstanceSystemMetadata(BASE, NovaBase):
"""Represents a system-owned metadata key/value pair for an instance."""
__tablename__ = 'instance_system_metadata'
__table_args__ = ()
id = Column(Integer, primary_key=True)
key = Column(String(255), nullable=False)
value = Column(String(255))
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'),
nullable=False)
primary_join = ('and_(InstanceSystemMetadata.instance_uuid == '
'Instance.uuid, InstanceSystemMetadata.deleted == 0)')
instance = orm.relationship(Instance, backref="system_metadata",
foreign_keys=instance_uuid,
primaryjoin=primary_join)
class InstanceTypeProjects(BASE, NovaBase):
"""Represent projects associated instance_types."""
__tablename__ = "instance_type_projects"
__table_args__ = (schema.UniqueConstraint(
"instance_type_id", "project_id", "deleted",
name="uniq_instance_type_projects0instance_type_id0project_id0deleted"
),
)
id = Column(Integer, primary_key=True)
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
project_id = Column(String(255))
instance_type = orm.relationship(InstanceTypes, backref="projects",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeProjects.instance_type_id == InstanceTypes.id,'
'InstanceTypeProjects.deleted == 0)')
class InstanceTypeExtraSpecs(BASE, NovaBase):
"""Represents additional specs as key/value pairs for an instance_type."""
__tablename__ = 'instance_type_extra_specs'
__table_args__ = (
Index('instance_type_extra_specs_instance_type_id_key_idx',
'instance_type_id', 'key'),
schema.UniqueConstraint(
"instance_type_id", "key", "deleted",
name=("uniq_instance_type_extra_specs0"
"instance_type_id0key0deleted")
),
)
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
instance_type = orm.relationship(InstanceTypes, backref="extra_specs",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeExtraSpecs.instance_type_id == InstanceTypes.id,'
'InstanceTypeExtraSpecs.deleted == 0)')
class Cell(BASE, NovaBase):
"""Represents parent and child cells of this cell. Cells can
have multiple parents and children, so there could be any number
of entries with is_parent=True or False
"""
__tablename__ = 'cells'
__table_args__ = (schema.UniqueConstraint(
"name", "deleted", name="uniq_cells0name0deleted"
),
)
id = Column(Integer, primary_key=True)
# Name here is the 'short name' of a cell. For instance: 'child1'
name = Column(String(255))
api_url = Column(String(255))
transport_url = Column(String(255), nullable=False)
weight_offset = Column(Float(), default=0.0)
weight_scale = Column(Float(), default=1.0)
is_parent = Column(Boolean())
class AggregateHost(BASE, NovaBase):
"""Represents a host that is member of an aggregate."""
__tablename__ = 'aggregate_hosts'
__table_args__ = (schema.UniqueConstraint(
"host", "aggregate_id", "deleted",
name="uniq_aggregate_hosts0host0aggregate_id0deleted"
),
)
id = Column(Integer, primary_key=True, autoincrement=True)
host = Column(String(255))
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class AggregateMetadata(BASE, NovaBase):
"""Represents a metadata key/value pair for an aggregate."""
__tablename__ = 'aggregate_metadata'
__table_args__ = (
schema.UniqueConstraint("aggregate_id", "key", "deleted",
name="uniq_aggregate_metadata0aggregate_id0key0deleted"
),
Index('aggregate_metadata_key_idx', 'key'),
)
id = Column(Integer, primary_key=True)
key = Column(String(255), nullable=False)
value = Column(String(255), nullable=False)
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class Aggregate(BASE, NovaBase):
"""Represents a cluster of hosts that exists in this zone."""
__tablename__ = 'aggregates'
__table_args__ = ()
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(255))
_hosts = orm.relationship(AggregateHost,
primaryjoin='and_('
'Aggregate.id == AggregateHost.aggregate_id,'
'AggregateHost.deleted == 0,'
'Aggregate.deleted == 0)')
_metadata = orm.relationship(AggregateMetadata,
primaryjoin='and_('
'Aggregate.id == AggregateMetadata.aggregate_id,'
'AggregateMetadata.deleted == 0,'
'Aggregate.deleted == 0)')
@property
def _extra_keys(self):
return ['hosts', 'metadetails', 'availability_zone']
@property
def hosts(self):
return [h.host for h in self._hosts]
@property
def metadetails(self):
return dict([(m.key, m.value) for m in self._metadata])
@property
def availability_zone(self):
if 'availability_zone' not in self.metadetails:
return None
return self.metadetails['availability_zone']
class AgentBuild(BASE, NovaBase):
"""Represents an agent build."""
__tablename__ = 'agent_builds'
__table_args__ = (
Index('agent_builds_hypervisor_os_arch_idx', 'hypervisor', 'os',
'architecture'),
schema.UniqueConstraint("hypervisor", "os", "architecture", "deleted",
name="uniq_agent_builds0hypervisor0os0architecture0deleted"),
)
id = Column(Integer, primary_key=True)
hypervisor = Column(String(255))
os = Column(String(255))
architecture = Column(String(255))
version = Column(String(255))
url = Column(String(255))
md5hash = Column(String(255))
class BandwidthUsage(BASE, NovaBase):
"""Cache for instance bandwidth usage data pulled from the hypervisor."""
__tablename__ = 'bw_usage_cache'
__table_args__ = (
Index('bw_usage_cache_uuid_start_period_idx', 'uuid',
'start_period'),
)
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(String(36))
mac = Column(String(255))
start_period = Column(DateTime, nullable=False)
last_refreshed = Column(DateTime)
bw_in = Column(BigInteger)
bw_out = Column(BigInteger)
last_ctr_in = Column(BigInteger)
last_ctr_out = Column(BigInteger)
class VolumeUsage(BASE, NovaBase):
"""Cache for volume usage data pulled from the hypervisor."""
__tablename__ = 'volume_usage_cache'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
volume_id = Column(String(36), nullable=False)
instance_uuid = Column(String(36))
project_id = Column(String(36))
user_id = Column(String(64))
availability_zone = Column(String(255))
tot_last_refreshed = Column(DateTime)
tot_reads = Column(BigInteger, default=0)
tot_read_bytes = Column(BigInteger, default=0)
tot_writes = Column(BigInteger, default=0)
tot_write_bytes = Column(BigInteger, default=0)
curr_last_refreshed = Column(DateTime)
curr_reads = Column(BigInteger, default=0)
curr_read_bytes = Column(BigInteger, default=0)
curr_writes = Column(BigInteger, default=0)
curr_write_bytes = Column(BigInteger, default=0)
class S3Image(BASE, NovaBase):
"""Compatibility layer for the S3 image service talking to Glance."""
__tablename__ = 's3_images'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class VolumeIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 volume service."""
__tablename__ = 'volume_id_mappings'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class SnapshotIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 snapshot service."""
__tablename__ = 'snapshot_id_mappings'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class InstanceFault(BASE, NovaBase):
__tablename__ = 'instance_faults'
__table_args__ = (
Index('instance_faults_host_idx', 'host'),
Index('instance_faults_instance_uuid_deleted_created_at_idx',
'instance_uuid', 'deleted', 'created_at')
)
id = Column(Integer, primary_key=True, nullable=False)
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'))
code = Column(Integer(), nullable=False)
message = Column(String(255))
details = Column(MediumText())
host = Column(String(255))
class InstanceAction(BASE, NovaBase):
"""Track client actions on an instance.
The intention is that there will only be one of these per user request. A
lookup by (instance_uuid, request_id) should always return a single result.
"""
__tablename__ = 'instance_actions'
__table_args__ = (
Index('instance_uuid_idx', 'instance_uuid'),
Index('request_id_idx', 'request_id')
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
action = Column(String(255))
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'))
request_id = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
start_time = Column(DateTime, default=timeutils.utcnow)
finish_time = Column(DateTime)
message = Column(String(255))
class InstanceActionEvent(BASE, NovaBase):
"""Track events that occur during an InstanceAction."""
__tablename__ = 'instance_actions_events'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
event = Column(String(255))
action_id = Column(Integer, ForeignKey('instance_actions.id'))
start_time = Column(DateTime, default=timeutils.utcnow)
finish_time = Column(DateTime)
result = Column(String(255))
traceback = Column(Text)
host = Column(String(255))
details = Column(Text)
class InstanceIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 instance service."""
__tablename__ = 'instance_id_mappings'
__table_args__ = (
Index('ix_instance_id_mappings_uuid', 'uuid'),
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class TaskLog(BASE, NovaBase):
"""Audit log for background periodic tasks."""
__tablename__ = 'task_log'
__table_args__ = (
schema.UniqueConstraint(
'task_name', 'host', 'period_beginning', 'period_ending',
name="uniq_task_log0task_name0host0period_beginning0period_ending"
),
Index('ix_task_log_period_beginning', 'period_beginning'),
Index('ix_task_log_host', 'host'),
Index('ix_task_log_period_ending', 'period_ending'),
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
task_name = Column(String(255), nullable=False)
state = Column(String(255), nullable=False)
host = Column(String(255), nullable=False)
period_beginning = Column(DateTime, default=timeutils.utcnow,
nullable=False)
period_ending = Column(DateTime, default=timeutils.utcnow,
nullable=False)
message = Column(String(255), nullable=False)
task_items = Column(Integer(), default=0)
errors = Column(Integer(), default=0)
class InstanceGroupMember(BASE, NovaBase):
"""Represents the members for an instance group."""
__tablename__ = 'instance_group_member'
__table_args__ = (
Index('instance_group_member_instance_idx', 'instance_id'),
)
id = Column(Integer, primary_key=True, nullable=False)
instance_id = Column(String(255))
group_id = Column(Integer, ForeignKey('instance_groups.id'),
nullable=False)
class InstanceGroupPolicy(BASE, NovaBase):
"""Represents the policy type for an instance group."""
__tablename__ = 'instance_group_policy'
__table_args__ = (
Index('instance_group_policy_policy_idx', 'policy'),
)
id = Column(Integer, primary_key=True, nullable=False)
policy = Column(String(255))
group_id = Column(Integer, ForeignKey('instance_groups.id'),
nullable=False)
class InstanceGroup(BASE, NovaBase):
"""Represents an instance group.
A group will maintain a collection of instances and the relationship
between them.
"""
__tablename__ = 'instance_groups'
__table_args__ = (
schema.UniqueConstraint("uuid", "deleted",
name="uniq_instance_groups0uuid0deleted"),
)
id = Column(Integer, primary_key=True, autoincrement=True)
user_id = Column(String(255))
project_id = Column(String(255))
uuid = Column(String(36), nullable=False)
name = Column(String(255))
_policies = orm.relationship(InstanceGroupPolicy, primaryjoin='and_('
'InstanceGroup.id == InstanceGroupPolicy.group_id,'
'InstanceGroupPolicy.deleted == 0,'
'InstanceGroup.deleted == 0)')
_members = orm.relationship(InstanceGroupMember, primaryjoin='and_('
'InstanceGroup.id == InstanceGroupMember.group_id,'
'InstanceGroupMember.deleted == 0,'
'InstanceGroup.deleted == 0)')
@property
def policies(self):
return [p.policy for p in self._policies]
@property
def members(self):
return [m.instance_id for m in self._members]
class PciDevice(BASE, NovaBase):
"""Represents a PCI host device that can be passed through to instances.
"""
__tablename__ = 'pci_devices'
__table_args__ = (
Index('ix_pci_devices_compute_node_id_deleted',
'compute_node_id', 'deleted'),
Index('ix_pci_devices_instance_uuid_deleted',
'instance_uuid', 'deleted'),
schema.UniqueConstraint(
"compute_node_id", "address", "deleted",
name="uniq_pci_devices0compute_node_id0address0deleted")
)
id = Column(Integer, primary_key=True)
compute_node_id = Column(Integer, ForeignKey('compute_nodes.id'),
nullable=False)
# physical address of device domain:bus:slot.func (0000:09:01.1)
address = Column(String(12), nullable=False)
vendor_id = Column(String(4), nullable=False)
product_id = Column(String(4), nullable=False)
dev_type = Column(String(8), nullable=False)
dev_id = Column(String(255))
    # label is an abstract device name, used to unify devices that have the
    # same functionality but different addresses or hosts.
label = Column(String(255), nullable=False)
status = Column(String(36), nullable=False)
extra_info = Column(Text)
instance_uuid = Column(String(36))
instance = orm.relationship(Instance, backref="pci_devices",
foreign_keys=instance_uuid,
primaryjoin='and_('
'PciDevice.instance_uuid == Instance.uuid,'
'PciDevice.deleted == 0)')
| jumpstarter-io/nova | nova/db/sqlalchemy/models.py | Python | apache-2.0 | 52,354 |
#!/usr/bin/env python3
"""
wsdump.py
websocket - WebSocket client library for Python
Copyright 2021 engn33r
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import argparse
import code
import sys
import threading
import time
import ssl
import gzip
import zlib
from urllib.parse import urlparse
import websocket
try:
import readline
except ImportError:
pass
def get_encoding():
encoding = getattr(sys.stdin, "encoding", "")
if not encoding:
return "utf-8"
else:
return encoding.lower()
OPCODE_DATA = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
ENCODING = get_encoding()
class VAction(argparse.Action):
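    """Verbosity action: a bare "-v" sets 1, "-vv" counts the v's (2), and an
    explicit value such as "-v 2" is parsed as an integer and used directly."""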
def __call__(self, parser, args, values, option_string=None):
if values is None:
values = "1"
try:
values = int(values)
except ValueError:
values = values.count("v") + 1
setattr(args, self.dest, values)
def parse_args():
parser = argparse.ArgumentParser(description="WebSocket Simple Dump Tool")
parser.add_argument("url", metavar="ws_url",
help="websocket url. ex. ws://echo.websocket.org/")
parser.add_argument("-p", "--proxy",
help="proxy url. ex. http://127.0.0.1:8080")
parser.add_argument("-v", "--verbose", default=0, nargs='?', action=VAction,
dest="verbose",
help="set verbose mode. If set to 1, show opcode. "
"If set to 2, enable to trace websocket module")
parser.add_argument("-n", "--nocert", action='store_true',
help="Ignore invalid SSL cert")
parser.add_argument("-r", "--raw", action="store_true",
help="raw output")
parser.add_argument("-s", "--subprotocols", nargs='*',
help="Set subprotocols")
parser.add_argument("-o", "--origin",
help="Set origin")
parser.add_argument("--eof-wait", default=0, type=int,
help="wait time(second) after 'EOF' received.")
parser.add_argument("-t", "--text",
help="Send initial text")
parser.add_argument("--timings", action="store_true",
help="Print timings in seconds")
parser.add_argument("--headers",
help="Set custom headers. Use ',' as separator")
return parser.parse_args()
class RawInput:
def raw_input(self, prompt):
line = input(prompt)
if ENCODING and ENCODING != "utf-8" and not isinstance(line, str):
line = line.decode(ENCODING).encode("utf-8")
elif isinstance(line, str):
line = line.encode("utf-8")
return line
class InteractiveConsole(RawInput, code.InteractiveConsole):
def write(self, data):
sys.stdout.write("\033[2K\033[E")
# sys.stdout.write("\n")
sys.stdout.write("\033[34m< " + data + "\033[39m")
sys.stdout.write("\n> ")
sys.stdout.flush()
def read(self):
return self.raw_input("> ")
class NonInteractive(RawInput):
def write(self, data):
sys.stdout.write(data)
sys.stdout.write("\n")
sys.stdout.flush()
def read(self):
return self.raw_input("")
def main():
start_time = time.time()
args = parse_args()
if args.verbose > 1:
websocket.enableTrace(True)
options = {}
if args.proxy:
p = urlparse(args.proxy)
options["http_proxy_host"] = p.hostname
options["http_proxy_port"] = p.port
if args.origin:
options["origin"] = args.origin
if args.subprotocols:
options["subprotocols"] = args.subprotocols
opts = {}
if args.nocert:
opts = {"cert_reqs": ssl.CERT_NONE, "check_hostname": False}
if args.headers:
options['header'] = list(map(str.strip, args.headers.split(',')))
ws = websocket.create_connection(args.url, sslopt=opts, **options)
if args.raw:
console = NonInteractive()
else:
console = InteractiveConsole()
print("Press Ctrl+C to quit")
def recv():
try:
frame = ws.recv_frame()
except websocket.WebSocketException:
return websocket.ABNF.OPCODE_CLOSE, None
if not frame:
raise websocket.WebSocketException("Not a valid frame %s" % frame)
elif frame.opcode in OPCODE_DATA:
return frame.opcode, frame.data
elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
ws.send_close()
return frame.opcode, None
elif frame.opcode == websocket.ABNF.OPCODE_PING:
ws.pong(frame.data)
return frame.opcode, frame.data
return frame.opcode, frame.data
def recv_ws():
while True:
opcode, data = recv()
msg = None
if opcode == websocket.ABNF.OPCODE_TEXT and isinstance(data, bytes):
data = str(data, "utf-8")
            if isinstance(data, bytes) and len(data) > 2 and data[:2] == b'\037\213':  # gzip magic number
try:
data = "[gzip] " + str(gzip.decompress(data), "utf-8")
except:
pass
elif isinstance(data, bytes):
try:
data = "[zlib] " + str(zlib.decompress(data, -zlib.MAX_WBITS), "utf-8")
except:
pass
if isinstance(data, bytes):
data = repr(data)
if args.verbose:
msg = "%s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)
else:
msg = data
if msg is not None:
if args.timings:
console.write(str(time.time() - start_time) + ": " + msg)
else:
console.write(msg)
if opcode == websocket.ABNF.OPCODE_CLOSE:
break
thread = threading.Thread(target=recv_ws)
thread.daemon = True
thread.start()
if args.text:
ws.send(args.text)
while True:
try:
message = console.read()
ws.send(message)
except KeyboardInterrupt:
return
except EOFError:
time.sleep(args.eof_wait)
return
if __name__ == "__main__":
try:
main()
except Exception as e:
print(e)
| liris/websocket-client | bin/wsdump.py | Python | lgpl-3.0 | 6,906 |
"""
Adds this import line:
from builtins import XYZ
for each of the functions XYZ that is used in the module.
"""
from __future__ import unicode_literals
from lib2to3 import fixer_base
from lib2to3.pygram import python_symbols as syms
from lib2to3.fixer_util import Name, Call, in_special_context
from libfuturize.fixer_util import touch_import_top
# All builtins are:
# from future.builtins.iterators import (filter, map, zip)
# from future.builtins.misc import (ascii, chr, hex, input, isinstance, oct, open, round, super)
# from future.types import (bytes, dict, int, range, str)
# We don't need isinstance any more.
replaced_builtins = '''filter map zip
ascii chr hex input next oct open round super
bytes dict int range str'''.split()
expression = '|'.join(["name='{0}'".format(name) for name in replaced_builtins])
class FixFutureBuiltins(fixer_base.BaseFix):
BM_compatible = True
run_order = 9
# Currently we only match uses as a function. This doesn't match e.g.:
# if isinstance(s, str):
# ...
PATTERN = """
power<
({0}) trailer< '(' args=[any] ')' >
rest=any* >
""".format(expression)
def transform(self, node, results):
name = results["name"]
touch_import_top(u'builtins', name.value, node)
# name.replace(Name(u"input", prefix=name.prefix))
| noam09/deluge-telegramer | telegramer/include/libpasteurize/fixes/fix_future_builtins.py | Python | gpl-3.0 | 1,450 |
import datetime
one_day = datetime.timedelta(days=1)
def get_semana(date):
"""Return the full week (Monday first) of the week containing the given date.
'date' may be a datetime or date instance (the same type is returned).
"""
    day_idx = date.weekday()  # weekday() is 0 for Monday, ..., 6 for Sunday
    monday = date - datetime.timedelta(days=day_idx)
    date = monday
for n in xrange(7):
yield date
date += one_day
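# Illustrative usage, not part of the original module: for a Wednesday such as
# datetime.date(2015, 7, 1), list(get_semana(...)) yields the seven dates from
# Monday 2015-06-29 through Sunday 2015-07-05.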
def semana_actual():
return list(get_semana(datetime.datetime.now().date())) | mchaparro/horarios | horarios/views/dias_semana.py | Python | apache-2.0 | 518 |
# copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
# copyright 2003-2010 Sylvain Thenault, all rights reserved.
# contact mailto:[email protected]
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""this module contains a set of functions to create astng trees from scratch
(build_* functions) or from living object (object_build_* functions)
"""
__docformat__ = "restructuredtext en"
import sys
from os.path import abspath
from inspect import (getargspec, isdatadescriptor, isfunction, ismethod,
ismethoddescriptor, isclass, isbuiltin)
from logilab.astng import BUILTINS_MODULE
from logilab.astng.node_classes import CONST_CLS
from logilab.astng.nodes import (Module, Class, Const, const_factory, From,
Function, EmptyNode, Name, Arguments, Dict, List, Set, Tuple)
from logilab.astng.bases import Generator
from logilab.astng.manager import ASTNGManager
MANAGER = ASTNGManager()
_CONSTANTS = tuple(CONST_CLS) # the keys of CONST_CLS eg python builtin types
def _attach_local_node(parent, node, name):
node.name = name # needed by add_local_node
parent.add_local_node(node)
_marker = object()
def attach_dummy_node(node, name, object=_marker):
"""create a dummy node and register it in the locals of the given
node with the specified name
"""
enode = EmptyNode()
enode.object = object
_attach_local_node(node, enode, name)
EmptyNode.has_underlying_object = lambda self: self.object is not _marker
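# (The lambda above lets callers check whether a dummy node was attached with a
# real living object, or only as a bare name placeholder.)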
def attach_const_node(node, name, value):
"""create a Const node and register it in the locals of the given
node with the specified name
"""
if not name in node.special_attributes:
_attach_local_node(node, const_factory(value), name)
def attach_import_node(node, modname, membername):
"""create a From node and register it in the locals of the given
node with the specified name
"""
from_node = From(modname, [(membername, None)])
_attach_local_node(node, from_node, membername)
def build_module(name, doc=None):
"""create and initialize a astng Module node"""
node = Module(name, doc, pure_python=False)
node.package = False
node.parent = None
return node
def build_class(name, basenames=(), doc=None):
"""create and initialize a astng Class node"""
node = Class(name, doc)
for base in basenames:
basenode = Name()
basenode.name = base
node.bases.append(basenode)
basenode.parent = node
return node
def build_function(name, args=None, defaults=None, flag=0, doc=None):
"""create and initialize a astng Function node"""
args, defaults = args or [], defaults or []
# first argument is now a list of decorators
func = Function(name, doc)
func.args = argsnode = Arguments()
argsnode.args = []
for arg in args:
argsnode.args.append(Name())
argsnode.args[-1].name = arg
argsnode.args[-1].parent = argsnode
argsnode.defaults = []
for default in defaults:
argsnode.defaults.append(const_factory(default))
argsnode.defaults[-1].parent = argsnode
argsnode.kwarg = None
argsnode.vararg = None
argsnode.parent = func
if args:
register_arguments(func)
return func
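# Illustrative example, not part of the original module: build_function('f', ['a', 'b'], [1])
# yields a Function node roughly equivalent to the source "def f(a, b=1): ...",
# with the single default bound to the last argument.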
def build_from_import(fromname, names):
"""create and initialize an astng From import statement"""
return From(fromname, [(name, None) for name in names])
def register_arguments(func, args=None):
"""add given arguments to local
args is a list that may contains nested lists
(i.e. def func(a, (b, c, d)): ...)
"""
if args is None:
args = func.args.args
if func.args.vararg:
func.set_local(func.args.vararg, func.args)
if func.args.kwarg:
func.set_local(func.args.kwarg, func.args)
for arg in args:
if isinstance(arg, Name):
func.set_local(arg.name, arg)
else:
register_arguments(func, arg.elts)
def object_build_class(node, member, localname):
"""create astng for a living class object"""
basenames = [base.__name__ for base in member.__bases__]
return _base_class_object_build(node, member, basenames,
localname=localname)
def object_build_function(node, member, localname):
"""create astng for a living function object"""
args, varargs, varkw, defaults = getargspec(member)
if varargs is not None:
args.append(varargs)
if varkw is not None:
args.append(varkw)
func = build_function(getattr(member, '__name__', None) or localname, args,
defaults, member.func_code.co_flags, member.__doc__)
node.add_local_node(func, localname)
def object_build_datadescriptor(node, member, name):
"""create astng for a living data descriptor object"""
return _base_class_object_build(node, member, [], name)
def object_build_methoddescriptor(node, member, localname):
"""create astng for a living method descriptor object"""
# FIXME get arguments ?
func = build_function(getattr(member, '__name__', None) or localname,
doc=member.__doc__)
    # set the node's arguments to None to signal that we have no information,
    # not an empty argument list
func.args.args = None
node.add_local_node(func, localname)
def _base_class_object_build(node, member, basenames, name=None, localname=None):
"""create astng for a living class object, with a given set of base names
(e.g. ancestors)
"""
klass = build_class(name or getattr(member, '__name__', None) or localname,
basenames, member.__doc__)
klass._newstyle = isinstance(member, type)
node.add_local_node(klass, localname)
try:
# limit the instantiation trick since it's too dangerous
# (such as infinite test execution...)
# this at least resolves common case such as Exception.args,
# OSError.errno
if issubclass(member, Exception):
instdict = member().__dict__
else:
raise TypeError
except:
pass
else:
for name, obj in instdict.items():
valnode = EmptyNode()
valnode.object = obj
valnode.parent = klass
valnode.lineno = 1
klass.instance_attrs[name] = [valnode]
return klass
class InspectBuilder(object):
"""class for building nodes from living object
this is actually a really minimal representation, including only Module,
Function and Class nodes and some others as guessed.
"""
# astng from living objects ###############################################
def __init__(self):
self._done = {}
self._module = None
def inspect_build(self, module, modname=None, path=None):
"""build astng from a living module (i.e. using inspect)
this is used when there is no python source code available (either
because it's a built-in module or because the .py is not available)
"""
self._module = module
if modname is None:
modname = module.__name__
try:
node = build_module(modname, module.__doc__)
except AttributeError:
# in jython, java modules have no __doc__ (see #109562)
node = build_module(modname)
node.file = node.path = path and abspath(path) or path
MANAGER.astng_cache[modname] = node
node.package = hasattr(module, '__path__')
self._done = {}
self.object_build(node, module)
return node
def object_build(self, node, obj):
"""recursive method which create a partial ast from real objects
(only function, class, and method are handled)
"""
if obj in self._done:
return self._done[obj]
self._done[obj] = node
for name in dir(obj):
try:
member = getattr(obj, name)
except AttributeError:
# damned ExtensionClass.Base, I know you're there !
attach_dummy_node(node, name)
continue
if ismethod(member):
member = member.im_func
if isfunction(member):
# verify this is not an imported function
filename = getattr(member.func_code, 'co_filename', None)
if filename is None:
assert isinstance(member, object)
object_build_methoddescriptor(node, member, name)
elif filename != getattr(self._module, '__file__', None):
attach_dummy_node(node, name, member)
else:
object_build_function(node, member, name)
elif isbuiltin(member):
if self.imported_member(node, member, name):
#if obj is object:
# print 'skippp', obj, name, member
continue
object_build_methoddescriptor(node, member, name)
elif isclass(member):
if self.imported_member(node, member, name):
continue
if member in self._done:
class_node = self._done[member]
if not class_node in node.locals.get(name, ()):
node.add_local_node(class_node, name)
else:
class_node = object_build_class(node, member, name)
# recursion
self.object_build(class_node, member)
if name == '__class__' and class_node.parent is None:
class_node.parent = self._done[self._module]
elif ismethoddescriptor(member):
assert isinstance(member, object)
object_build_methoddescriptor(node, member, name)
elif isdatadescriptor(member):
assert isinstance(member, object)
object_build_datadescriptor(node, member, name)
elif isinstance(member, _CONSTANTS):
attach_const_node(node, name, member)
else:
# create an empty node so that the name is actually defined
attach_dummy_node(node, name, member)
def imported_member(self, node, member, name):
"""verify this is not an imported class or handle it"""
        # /!\ some classes like ExtensionClass don't have a __module__
        # attribute! Also, this may trigger an exception on badly built modules
# (see http://www.logilab.org/ticket/57299 for instance)
try:
modname = getattr(member, '__module__', None)
except:
# XXX use logging
print 'unexpected error while building astng from living object'
import traceback
traceback.print_exc()
modname = None
if modname is None:
if name in ('__new__', '__subclasshook__'):
# Python 2.5.1 (r251:54863, Sep 1 2010, 22:03:14)
# >>> print object.__new__.__module__
# None
modname = BUILTINS_MODULE
else:
attach_dummy_node(node, name, member)
return True
if {'gtk': 'gtk._gtk'}.get(modname, modname) != self._module.__name__:
# check if it sounds valid and then add an import node, else use a
# dummy node
try:
getattr(sys.modules[modname], name)
except (KeyError, AttributeError):
attach_dummy_node(node, name, member)
else:
attach_import_node(node, modname, name)
return True
return False
### astng boot strapping ################################################### ###
_CONST_PROXY = {}
def astng_boot_strapping():
"""astng boot strapping the builtins module"""
# this boot strapping is necessary since we need the Const nodes to
# inspect_build builtins, and then we can proxy Const
builder = InspectBuilder()
from logilab.common.compat import builtins
astng_builtin = builder.inspect_build(builtins)
for cls, node_cls in CONST_CLS.items():
if cls is type(None):
proxy = build_class('NoneType')
proxy.parent = astng_builtin
else:
proxy = astng_builtin.getattr(cls.__name__)[0] # XXX
if cls in (dict, list, set, tuple):
node_cls._proxied = proxy
else:
_CONST_PROXY[cls] = proxy
astng_boot_strapping()
# TODO : find a nicer way to handle this situation;
# However __proxied introduced an
# infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870)
def _set_proxied(const):
return _CONST_PROXY[const.value.__class__]
Const._proxied = property(_set_proxied)
# FIXME : is it alright that Generator._proxied is not a astng node?
Generator._proxied = MANAGER.infer_astng_from_something(type(a for a in ()))
| gkarlin/django-jenkins | build/logilab-astng/raw_building.py | Python | lgpl-3.0 | 13,712 |
from contextlib import contextmanager
from resource import getrusage, RUSAGE_SELF
import sys
import time
from invoke.vendor.six import wraps
from spec import skip
def current_cpu_usage():
rusage = getrusage(RUSAGE_SELF)
return rusage.ru_utime + rusage.ru_stime
@contextmanager
def assert_cpu_usage(lt, verbose=False):
"""
Execute wrapped block, asserting CPU utilization was less than ``lt``%.
:param float lt: CPU use percentage above which failure will occur.
:param bool verbose: Whether to print out the calculated percentage.
"""
start_usage = current_cpu_usage()
start_time = time.time()
yield
end_usage = current_cpu_usage()
end_time = time.time()
usage_diff = end_usage - start_usage
time_diff = end_time - start_time
if time_diff == 0: # Apparently possible!
time_diff = 0.000001
percentage = (usage_diff / time_diff) * 100.0
if verbose:
print("Used {0:.2}% CPU over {1:.2}s".format(percentage, time_diff))
assert percentage < lt
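# --- hedged usage sketch, not part of the original helper module ---
# Illustrative only: a sleeping block should use (almost) no CPU, so the
# assertion inside assert_cpu_usage ought to pass; the 5.0 percent bound is an
# arbitrary example value.
def _example_low_cpu_block():
    with assert_cpu_usage(lt=5.0, verbose=True):
        time.sleep(0.2)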
def only_utf8(f):
"""
Decorator causing tests to skip if local shell pipes aren't UTF-8.
"""
# TODO: use actual test selection labels or whatever nose has
@wraps(f)
def inner(*args, **kwargs):
if getattr(sys.stdout, 'encoding', None) == 'UTF-8':
return f(*args, **kwargs)
# TODO: could remove this so they show green, but figure yellow is more
# appropriate
skip()
return inner
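# --- hedged usage sketch, not part of the original helper module ---
# Example of guarding a test with only_utf8 so it is skipped when local shell
# pipes are not UTF-8; the test body is invented for illustration.
@only_utf8
def _example_unicode_roundtrip():
    assert u'caf\xe9'.encode('utf-8').decode('utf-8') == u'caf\xe9'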
| mkusz/invoke | integration/_util.py | Python | bsd-2-clause | 1,491 |
import sys
import logging
import os
from os.path import dirname, split
def install_service(argv):
from .servicehelpers import handle_command_line
new_argv = [dirname(__file__)]
for arg in argv:
new_argv.append(arg)
handle_command_line(new_argv)
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
from servicehelpers import XMSSServiceRunner, handle_command_line
from utils import get_config, process_jobs_in_folder, process_file_job, create_rotating_log
if len(sys.argv) == 2 and sys.argv[1].lower() == '--poll':
"""
Console mode polls a folder for SendSecure file jobs
"""
create_rotating_log(get_config('logging'), True)
logger = logging.getLogger("SSFilePollerRotatingLog")
try:
logger.info('Running in console mode')
while True:
process_jobs_in_folder(get_config('settings'))
except KeyboardInterrupt as e:
logger.info('<<<<<<<<< <<<<<<<< LOG STOP >>>>>>>>> >>>>>>>>>')
except Exception as e:
logger.error(str(e))
elif len(sys.argv) == 3 and sys.argv[1].lower() == '--file':
"""
One-Off mode processes the provided file job only
"""
create_rotating_log(get_config('logging'), True)
logger = logging.getLogger("SSFilePollerRotatingLog")
try:
logger.info('Running in One-Off mode')
process_file_job(sys.argv[2], get_config('settings'))
logger.info('<<<<<<<<< <<<<<<<< LOG STOP >>>>>>>>> >>>>>>>>>')
except Exception as e:
logger.error(str(e))
else:
"""
Handle command line for Windows service management
"""
try:
handle_command_line(sys.argv)
except SystemExit as e:
print('')
print('Usage for console mode: \'' + split(sys.argv[0])[1] + ' --poll|--file [...]\'')
print('Options for console mode:')
print(' --poll : starts module for polling for SendSecure file jobs in a folder. (CTRL-C to stop)')
print(' --file filename : One-Off mode. The file poller will process the provided file job only.')
raise
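# --- hedged usage note, not part of the original module ---
# Illustrative invocations matching the usage text printed above; the job file
# path is invented for the example:
#     python sendsecurefilepoller.py --poll
#     python sendsecurefilepoller.py --file /path/to/job_file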
| xmedius/sendsecure-filepoller | sendsecurefilepoller/sendsecurefilepoller.py | Python | mit | 2,266 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=====================
Classifier comparison
=====================
A comparison of several classifiers in scikit-learn on synthetic datasets.
The point of this example is to illustrate the nature of decision boundaries
of different classifiers.
This should be taken with a grain of salt, as the intuition conveyed by
these examples does not necessarily carry over to real datasets.
Particularly in high-dimensional spaces, data can more easily be separated
linearly and the simplicity of classifiers such as naive Bayes and linear SVMs
might lead to better generalization than is achieved by other classifiers.
The plots show training points in solid colors and testing points
semi-transparent. The lower right shows the classification accuracy on the test
set.
"""
print(__doc__)
# Code source: Gaël Varoquaux
# Andreas Müller
# Modified for documentation by Jaques Grobler
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import make_moons, make_circles, make_classification
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.lda import LDA
from sklearn.qda import QDA
h = .02 # step size in the mesh
names = ["3 Near. Neighb.", "Linear SVM", "RBF SVM", "RBF GPC",
"Decision Tree", "Random Forest", "AdaBoost", "Naive Bayes", "LDA",
"QDA"]
classifiers = [
KNeighborsClassifier(3),
SVC(kernel="linear", C=0.025),
SVC(gamma=2, C=1),
GaussianProcessClassifier(1.0 * RBF(1.0), warm_start=True),
DecisionTreeClassifier(max_depth=5),
RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1),
AdaBoostClassifier(),
GaussianNB(),
LDA(),
QDA()]
X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
random_state=1, n_clusters_per_class=1)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, y)
datasets = [make_moons(noise=0.3, random_state=0),
make_circles(noise=0.2, factor=0.5, random_state=1),
linearly_separable
]
figure = plt.figure(figsize=(27, 9))
i = 1
# iterate over datasets
for ds in datasets:
# preprocess dataset, split into training and test part
X, y = ds
X = StandardScaler().fit_transform(X)
X_train, X_test, y_train, y_test = \
train_test_split(X, y, test_size=.4, random_state=42)
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
# just plot the dataset first
cm = plt.cm.RdBu
cm_bright = ListedColormap(['#FF0000', '#0000FF'])
ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
# Plot the training points
ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
# and testing points
ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6)
ax.set_xlim(xx.min(), xx.max())
ax.set_ylim(yy.min(), yy.max())
ax.set_xticks(())
ax.set_yticks(())
i += 1
# iterate over classifiers
for name, clf in zip(names, classifiers):
ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
clf.fit(X_train, y_train)
score = clf.score(X_test, y_test)
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
if hasattr(clf, "decision_function"):
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
else:
Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]
# Put the result into a color plot
Z = Z.reshape(xx.shape)
ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)
# Plot also the training points
ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright)
# and testing points
ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,
alpha=0.6)
ax.set_xlim(xx.min(), xx.max())
ax.set_ylim(yy.min(), yy.max())
ax.set_xticks(())
ax.set_yticks(())
ax.set_title(name)
ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),
size=15, horizontalalignment='right')
i += 1
plt.tight_layout()
plt.show()
| fabianp/scikit-learn | examples/classification/plot_classifier_comparison.py | Python | bsd-3-clause | 4,895 |
import bstree
RED = 0
BLACK = 1
class RBTreeNode(bstree.BSTreeNode):
"""A node of a Red-Black Tree"""
def __init__(self, k, v, nil=None):
super(RBTreeNode, self).__init__(k, v, nil)
self.color = BLACK
self.size = 0
def update_size(self):
"""Update the size attribute using the size attribute of left and right childs.
Time complexity: O(1)"""
self.size = 1 + self.left.size + self.right.size
class RBTree(bstree.BSTree):
"""A Red-Black Binary Search Tree"""
def __init__(self, node_type=RBTreeNode):
super(RBTree, self).__init__(node_type)
self.nil.color = BLACK
def _left_rotate(self, x):
"""Perform a left rotation around node x
Time complexity: O(1)"""
y = x.right
x.right = y.left
if y.left is not self.nil:
y.left.parent = x
y.parent = x.parent
if x.parent is self.nil:
self.root = y
elif x is x.parent.left:
x.parent.left = y
else:
x.parent.right = y
y.left = x
x.parent = y
y.size = x.size
x.update_size()
def _right_rotate(self, x):
"""Perform a right rotation around node x
Time complexity: O(1)"""
y = x.left
x.left = y.right
if y.right is not self.nil:
y.right.parent = x
y.parent = x.parent
if x.parent is self.nil:
self.root = y
elif x is x.parent.right:
x.parent.right = y
else:
x.parent.left = y
y.right = x
x.parent = y
y.size = x.size
x.update_size()
def _insert_fixup(self, new):
"""Restore Red-Black properties of the tree after node insertion.
Time complexity: O(lg n)"""
while new.parent.color == RED:
if new.parent is new.parent.parent.left:
y = new.parent.parent.right
if y.color == RED:
new.parent.color = BLACK
y.color = BLACK
new.parent.parent.color = RED
new = new.parent.parent
else:
if new is new.parent.right:
new = new.parent
self._left_rotate(new)
new.parent.color = BLACK
new.parent.parent.color = RED
self._right_rotate(new.parent.parent)
else:
y = new.parent.parent.left
if y.color == RED:
new.parent.color = BLACK
y.color = BLACK
new.parent.parent.color = RED
new = new.parent.parent
else:
if new is new.parent.left:
new = new.parent
self._right_rotate(new)
new.parent.color = BLACK
new.parent.parent.color = RED
self._left_rotate(new.parent.parent)
self.root.color = BLACK
def _transplant(self, old, new):
"""Replace subtree rooted at node old with the subtree rooted at node new
Time complexity: O(1)"""
if old.parent is self.nil:
self.root = new
elif old is old.parent.left:
old.parent.left = new
else:
old.parent.right = new
new.parent = old.parent
def _delete_fixup(self, x):
"""Restore Red-Black properties of the tree after node deletion.
Time complexity: O(lg n)"""
while x is not self.root and x.color == BLACK:
if x is x.parent.left:
w = x.parent.right
if w.color == RED:
w.color = BLACK
x.parent.color = RED
self._left_rotate(x.parent)
w = x.parent.right
if w.left.color == BLACK and w.right.color == BLACK:
w.color = RED
x = x.parent
else:
if w.right.color == BLACK:
w.left.color = BLACK
w.color = RED
self._right_rotate(w)
w = x.parent.right
w.color = x.parent.color
x.parent.color = BLACK
w.right.color = BLACK
self._left_rotate(x.parent)
x = self.root
else:
w = x.parent.left
if w.color == RED:
w.color = BLACK
x.parent.color = RED
self._right_rotate(x.parent)
w = x.parent.left
if w.right.color == BLACK and w.left.color == BLACK:
w.color = RED
x = x.parent
else:
if w.left.color == BLACK:
w.right.color = BLACK
w.color = RED
self._left_rotate(w)
w = x.parent.left
w.color = x.parent.color
x.parent.color = BLACK
w.left.color = BLACK
self._right_rotate(x.parent)
x = self.root
x.color = BLACK
def _update_size(self, node):
"""Updates the size attribute on all nodes from node to the root.
Time complexity: O(lg n)"""
while node is not self.nil:
node.update_size()
node = node.parent
def deletekey(self, k):
node = self.find(k)
if node is not None:
node = self.delete(node)
return node
def delete(self, node):
"""Delete node from the tree, preserving all red-black properties.
Returns the deleted node.
Time complexity: O(lg n)"""
y = node
y_orig_color = y.color
if node.left is self.nil:
x = node.right
sz_update = node.parent
self._transplant(node, node.right)
self._update_size(sz_update)
elif node.right is self.nil:
x = node.left
sz_update = node.parent
self._transplant(node, node.left)
self._update_size(sz_update)
else:
y = self.minimum(node.right)
y_orig_color = y.color
x = y.right
if y.parent is node:
sz_update = y
x.parent = y
else:
sz_update = y.parent
self._transplant(y, y.right)
y.right = node.right
y.right.parent = y
self._transplant(node, y)
y.left = node.left
y.left.parent = y
y.color = node.color
self._update_size(sz_update)
if y_orig_color == BLACK:
self._delete_fixup(x)
return node
def update(self, x, new):
"""Set change value(s) of node x to those from node new
Time Complexity: O(1)
"""
x.value = new.value
def insert_node(self, new):
"""Insert a new node with distinct key k and value v into the tree, preserving all red-black properties.
Returns the newly inserted/updated node
Time complexity: O(lg n)"""
y = self.nil
x = self.root
while x is not self.nil:
y = x
if new.key < x.key:
x = x.left
elif new.key > x.key:
x = x.right
else:
# key is already in tree
self.update(x, new)
return x
new.parent = y
if y is self.nil:
self.root = new
elif new.key < y.key:
y.left = new
else:
y.right = new
new.left = self.nil
new.right = self.nil
new.color = RED
new.size = 1
self._update_size(new)
self._insert_fixup(new)
return new
def size(self):
"""Returns the number of nodes stored in the tree.
Time complexity: O(1)"""
return self.root.size
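# --- hedged usage sketch, not part of the original module ---
# Minimal illustration of the RBTree API defined above. It assumes the bstree
# base class provides find()/minimum() as referenced in the methods above and
# that RBTreeNode(key, value) nodes can be passed directly to insert_node().
def _example_rbtree_usage():
    tree = RBTree()
    for key in (5, 2, 8, 1):
        tree.insert_node(RBTreeNode(key, str(key)))
    assert tree.size() == 4
    tree.deletekey(2)
    assert tree.size() == 3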
| anonion0/nsec3map | n3map/tree/rbtree.py | Python | gpl-3.0 | 8,290 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGoDb(RPackage):
"""A set of annotation maps describing the entire Gene Ontology
A set of annotation maps describing the entire Gene
Ontology assembled using data from GO."""
homepage = "https://www.bioconductor.org/packages/GO.db/"
url = "https://www.bioconductor.org/packages/3.5/data/annotation/src/contrib/GO.db_3.4.1.tar.gz"
version('3.12.1',
sha256='e0316959d3d32096f9432c897413dff74fce53e15ead7917a7724467d971dab9',
url='https://bioconductor.org/packages/3.12/data/annotation/src/contrib/GO.db_3.12.1.tar.gz')
version('3.4.1',
sha256='2fc2048e9d26edb98e35e4adc4d18c6df54f44836b5cc4a482d36ed99e058cc1',
url='https://bioconductor.org/packages/3.5/data/annotation/src/contrib/GO.db_3.4.1.tar.gz')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@3.12.1:', type=('build', 'run'))
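# --- hedged usage note, not part of the original package recipe ---
# Illustrative only: with this recipe available in a Spack repository, the
# package would typically be installed with something along the lines of:
#     spack install r-go-db@3.12.1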
| LLNL/spack | var/spack/repos/builtin/packages/r-go-db/package.py | Python | lgpl-2.1 | 1,209 |
from __future__ import print_function, unicode_literals
import argparse
import ConfigParser
import sys
import requests
from .utils import Session
def parse_arguments(argv):
parser = argparse.ArgumentParser(
description="Delete JIRA users",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("--debug", default="")
parser.add_argument("--no-save", dest="save",
action="store_const", const=False, default=True,
help="Don't save the users specified in the config file",
)
args = parser.parse_args(argv[1:])
args.debug = args.debug.split(",")
return args
CMDLINE_ARGS = parse_arguments(sys.argv)
config = ConfigParser.SafeConfigParser()
files_read = config.read("config.ini")
if not files_read:
print("Couldn't read config.ini")
sys.exit(1)
session = Session(
nick=" new",
host=config.get("destination", "host"),
username=config.get("destination", "username"),
password=config.get("destination", "password"),
debug="requests" in CMDLINE_ARGS.debug,
)
try:
users_to_save = set(config.get("users", "save").split(","))
except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
users_to_save = set()
def iter_users_in_group(host, group="jira-users", session=None, start=0, **fields):
session = session or requests.Session()
more_results = True
while more_results:
result_url = (
host.with_path("/rest/api/2/group")
.set_query_param("groupname", group)
.set_query_param("expand", "users[{start}:{end}]".format(
start=start, end=start + 50))
.set_query_params(**fields)
)
result_resp = session.get(result_url)
result = result_resp.json()
for obj in result["users"]["items"]:
yield obj
returned = len(result["users"]["items"])
total = result["users"]["size"]
if start + returned < total:
start += returned
else:
more_results = False
def delete_jira_users():
user_gen = iter_users_in_group(host=session.host, session=session)
for user in user_gen:
if user["name"] not in users_to_save:
delete_url = (
session.host.with_path("/rest/api/2/user")
.set_query_param("username", user["name"])
)
delete_resp = session.delete(delete_url)
if not delete_resp.ok:
raise ValueError(delete_resp.text)
| singingwolfboy/jira-migrate | jira_migrate/users.py | Python | mit | 2,540 |
"""
Browser set up for acceptance tests.
"""
# pylint: disable=no-member
# pylint: disable=unused-argument
from base64 import encodestring
from json import dumps
from logging import getLogger
import requests
from django.conf import settings
from django.core.management import call_command
from lettuce import after, before, world
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from splinter.browser import Browser
import xmodule.modulestore.django
from xmodule.contentstore.django import _CONTENTSTORE
LOGGER = getLogger(__name__)
LOGGER.info("Loading the lettuce acceptance testing terrain file...")
MAX_VALID_BROWSER_ATTEMPTS = 20
GLOBAL_SCRIPT_TIMEOUT = 60
def get_saucelabs_username_and_key():
"""
Returns the Sauce Labs username and access ID as set by environment variables
"""
return {"username": settings.SAUCE.get('USERNAME'), "access-key": settings.SAUCE.get('ACCESS_ID')}
def set_saucelabs_job_status(jobid, passed=True):
"""
Sets the job status on sauce labs
"""
config = get_saucelabs_username_and_key()
url = 'http://saucelabs.com/rest/v1/{}/jobs/{}'.format(config['username'], world.jobid)
body_content = dumps({"passed": passed})
base64string = encodestring('{}:{}'.format(config['username'], config['access-key']))[:-1]
headers = {"Authorization": "Basic {}".format(base64string)}
result = requests.put(url, data=body_content, headers=headers)
return result.status_code == 200
def make_saucelabs_desired_capabilities():
"""
Returns a DesiredCapabilities object corresponding to the environment sauce parameters
"""
desired_capabilities = settings.SAUCE.get('BROWSER', DesiredCapabilities.CHROME)
desired_capabilities['platform'] = settings.SAUCE.get('PLATFORM')
desired_capabilities['version'] = settings.SAUCE.get('VERSION')
desired_capabilities['device-type'] = settings.SAUCE.get('DEVICE')
desired_capabilities['name'] = settings.SAUCE.get('SESSION')
desired_capabilities['build'] = settings.SAUCE.get('BUILD')
desired_capabilities['video-upload-on-pass'] = False
desired_capabilities['sauce-advisor'] = False
desired_capabilities['capture-html'] = True
desired_capabilities['record-screenshots'] = True
desired_capabilities['selenium-version'] = "2.34.0"
desired_capabilities['max-duration'] = 3600
desired_capabilities['public'] = 'public restricted'
return desired_capabilities
@before.harvest
def initial_setup(server):
"""
Launch the browser once before executing the tests.
"""
world.absorb(settings.LETTUCE_SELENIUM_CLIENT, 'LETTUCE_SELENIUM_CLIENT')
if world.LETTUCE_SELENIUM_CLIENT == 'local':
browser_driver = getattr(settings, 'LETTUCE_BROWSER', 'chrome')
if browser_driver == 'chrome':
desired_capabilities = DesiredCapabilities.CHROME
desired_capabilities['loggingPrefs'] = {
'browser': 'ALL',
}
else:
desired_capabilities = {}
# There is an issue with ChromeDriver2 r195627 on Ubuntu
# in which we sometimes get an invalid browser session.
# This is a work-around to ensure that we get a valid session.
success = False
num_attempts = 0
while (not success) and num_attempts < MAX_VALID_BROWSER_ATTEMPTS:
# Load the browser and try to visit the main page
# If the browser couldn't be reached or
# the browser session is invalid, this will
# raise a WebDriverException
try:
if browser_driver == 'firefox':
# Lettuce initializes differently for firefox, and sending
# desired_capabilities will not work. So initialize without
# sending desired_capabilities.
world.browser = Browser(browser_driver)
else:
world.browser = Browser(browser_driver, desired_capabilities=desired_capabilities)
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
world.visit('/')
except WebDriverException:
LOGGER.warn("Error acquiring %s browser, retrying", browser_driver, exc_info=True)
if hasattr(world, 'browser'):
world.browser.quit()
num_attempts += 1
else:
success = True
# If we were unable to get a valid session within the limit of attempts,
# then we cannot run the tests.
if not success:
raise IOError("Could not acquire valid {driver} browser session.".format(driver=browser_driver))
world.absorb(0, 'IMPLICIT_WAIT')
world.browser.driver.set_window_size(1280, 1024)
elif world.LETTUCE_SELENIUM_CLIENT == 'saucelabs':
config = get_saucelabs_username_and_key()
world.browser = Browser(
'remote',
url="http://{}:{}@ondemand.saucelabs.com:80/wd/hub".format(config['username'], config['access-key']),
**make_saucelabs_desired_capabilities()
)
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
elif world.LETTUCE_SELENIUM_CLIENT == 'grid':
world.browser = Browser(
'remote',
url=settings.SELENIUM_GRID.get('URL'),
browser=settings.SELENIUM_GRID.get('BROWSER'),
)
world.absorb(30, 'IMPLICIT_WAIT')
world.browser.driver.set_script_timeout(GLOBAL_SCRIPT_TIMEOUT)
else:
raise Exception("Unknown selenium client '{}'".format(world.LETTUCE_SELENIUM_CLIENT))
world.browser.driver.implicitly_wait(world.IMPLICIT_WAIT)
world.absorb(world.browser.driver.session_id, 'jobid')
@before.each_scenario
def reset_data(scenario):
"""
Clean out the django test database defined in the
envs/acceptance.py file: edx-platform/db/test_edx.db
"""
LOGGER.debug("Flushing the test database...")
call_command('flush', interactive=False, verbosity=0)
world.absorb({}, 'scenario_dict')
@before.each_scenario
def configure_screenshots(scenario):
"""
Before each scenario, turn off automatic screenshots.
Args: str, scenario. Name of current scenario.
"""
world.auto_capture_screenshots = False
@after.each_scenario
def clear_data(scenario):
world.spew('scenario_dict')
@after.each_scenario
def reset_databases(scenario):
'''
After each scenario, all databases are cleared/dropped. Contentstore data are stored in unique databases
whereas modulestore data is in unique collection names. This data is created implicitly during the scenarios.
    If no data is created during the test, these lines effectively do nothing.
'''
xmodule.modulestore.django.modulestore()._drop_database() # pylint: disable=protected-access
xmodule.modulestore.django.clear_existing_modulestores()
_CONTENTSTORE.clear()
@world.absorb
def capture_screenshot(image_name):
"""
Capture a screenshot outputting it to a defined directory.
This function expects only the name of the file. It will generate
the full path of the output screenshot.
    If the name contains spaces, they will be converted to underscores.
"""
output_dir = '{}/log/auto_screenshots'.format(settings.TEST_ROOT)
image_name = '{}/{}.png'.format(output_dir, image_name.replace(' ', '_'))
try:
world.browser.driver.save_screenshot(image_name)
except WebDriverException:
LOGGER.error("Could not capture a screenshot '{}'".format(image_name))
@after.each_scenario
def screenshot_on_error(scenario):
"""
Save a screenshot to help with debugging.
"""
if scenario.failed:
try:
output_dir = '{}/log'.format(settings.TEST_ROOT)
image_name = '{}/{}.png'.format(output_dir, scenario.name.replace(' ', '_'))
world.browser.driver.save_screenshot(image_name)
except WebDriverException:
LOGGER.error('Could not capture a screenshot')
@after.each_scenario
def capture_console_log(scenario):
"""
Save the console log to help with debugging.
"""
if scenario.failed:
log = world.browser.driver.get_log('browser')
try:
output_dir = '{}/log'.format(settings.TEST_ROOT)
file_name = '{}/{}.log'.format(output_dir, scenario.name.replace(' ', '_'))
with open(file_name, 'w') as output_file:
for line in log:
output_file.write("{}{}".format(dumps(line), '\n'))
except WebDriverException:
LOGGER.error('Could not capture the console log')
def capture_screenshot_for_step(step, when):
"""
Useful method for debugging acceptance tests that are run in Vagrant.
This method runs automatically before and after each step of an acceptance
test scenario. The variable:
world.auto_capture_screenshots
    either enables or disables the taking of screenshots. To change the
variable there is a convenient step defined:
I (enable|disable) auto screenshots
If you just want to capture a single screenshot at a desired point in code,
you should use the method:
world.capture_screenshot("image_name")
"""
if world.auto_capture_screenshots:
scenario_num = step.scenario.feature.scenarios.index(step.scenario) + 1
step_num = step.scenario.steps.index(step) + 1
step_func_name = step.defined_at.function.func_name
image_name = "{prefix:03d}__{num:03d}__{name}__{postfix}".format(
prefix=scenario_num,
num=step_num,
name=step_func_name,
postfix=when
)
world.capture_screenshot(image_name)
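# --- hedged illustration, not part of the original terrain file ---
# The convenience step mentioned in the docstring above is defined elsewhere in
# the codebase; an approximation of what such a step might look like:
#     @step(u'I (enable|disable) auto screenshots')
#     def auto_capture_screenshots(step, action):
#         world.auto_capture_screenshots = (action == 'enable')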
@before.each_step
def before_each_step(step):
capture_screenshot_for_step(step, '1_before')
@after.each_step
def after_each_step(step):
capture_screenshot_for_step(step, '2_after')
@after.harvest
def saucelabs_status(total):
"""
Collect data for saucelabs.
"""
if world.LETTUCE_SELENIUM_CLIENT == 'saucelabs':
set_saucelabs_job_status(world.jobid, total.scenarios_ran == total.scenarios_passed)
| pepeportela/edx-platform | common/djangoapps/terrain/browser.py | Python | agpl-3.0 | 10,330 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def make_name(name: str) -> str:
# Sample function parameter name in delete_data_item_sample
name = name
return name
| sasha-gitg/python-aiplatform | .sample_configs/param_handlers/delete_data_item_sample.py | Python | apache-2.0 | 708 |
from __future__ import absolute_import, unicode_literals
from django_pg.utils.gis import gis_backend
if gis_backend:
from django.contrib.gis.db.models import *
from django_pg.models.base import Model, Manager, GeoManager
else:
from django.db.models import *
from django_pg.models.base import Model, Manager
from django_pg.models.fields import *
| lukesneeringer/django-pgfields | django_pg/models/__init__.py | Python | bsd-3-clause | 363 |
from datetime import timedelta
from functools import partial
import itertools
import json
import operator
from operator import add
import os
from time import time, sleep
import sys
import pytest
from tornado import gen
from tornado.queues import Queue
from tornado.ioloop import IOLoop
import streamz as sz
from streamz import Stream, RefCounter
from streamz.sources import sink_to_file, PeriodicCallback
from streamz.utils_test import (inc, double, gen_test, tmpfile, captured_logger, # noqa: F401
clean, await_for, metadata, wait_for) # noqa: F401
from distributed.utils_test import loop # noqa: F401
def test_basic():
source = Stream()
b1 = source.map(inc)
b2 = source.map(double)
c = b1.scan(add)
Lc = c.sink_to_list()
Lb = b2.sink_to_list()
for i in range(4):
source.emit(i)
assert Lc == [1, 3, 6, 10]
assert Lb == [0, 2, 4, 6]
def test_no_output():
source = Stream()
assert source.emit(1) is None
def test_scan():
source = Stream()
def f(acc, i):
acc = acc + i
return acc, acc
L = source.scan(f, returns_state=True).sink_to_list()
for i in range(3):
source.emit(i)
assert L == [0, 1, 3]
def test_kwargs():
source = Stream()
def f(acc, x, y=None):
acc = acc + x + y
return acc
L = source.scan(f, y=10).sink_to_list()
for i in range(3):
source.emit(i)
assert L == [0, 11, 23]
def test_filter():
source = Stream()
L = source.filter(lambda x: x % 2 == 0).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [0, 2, 4, 6, 8]
def test_filter_args():
source = Stream()
L = source.filter(lambda x, n: x % n == 0, 2).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [0, 2, 4, 6, 8]
def test_filter_kwargs():
source = Stream()
L = source.filter(lambda x, n=1: x % n == 0, n=2).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [0, 2, 4, 6, 8]
def test_filter_none():
source = Stream()
L = source.filter(None).sink_to_list()
for i in range(10):
source.emit(i % 3)
assert L == [1, 2, 1, 2, 1, 2]
def test_map():
def add(x=0, y=0):
return x + y
source = Stream()
L = source.map(add, y=10).sink_to_list()
source.emit(1)
assert L[0] == 11
def test_map_args():
source = Stream()
L = source.map(operator.add, 10).sink_to_list()
source.emit(1)
assert L == [11]
def test_starmap():
def add(x=0, y=0):
return x + y
source = Stream()
L = source.starmap(add).sink_to_list()
source.emit((1, 10))
assert L[0] == 11
def test_remove():
source = Stream()
L = source.remove(lambda x: x % 2 == 0).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [1, 3, 5, 7, 9]
def test_partition():
source = Stream()
L = source.partition(2).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)]
def test_partition_timeout():
source = Stream()
L = source.partition(10, timeout=0.01).sink_to_list()
for i in range(5):
source.emit(i)
sleep(0.1)
assert L == [(0, 1, 2, 3, 4)]
def test_partition_timeout_cancel():
source = Stream()
L = source.partition(3, timeout=0.1).sink_to_list()
for i in range(3):
source.emit(i)
sleep(0.09)
source.emit(3)
sleep(0.02)
assert L == [(0, 1, 2)]
sleep(0.09)
assert L == [(0, 1, 2), (3,)]
def test_partition_key():
source = Stream()
L = source.partition(2, key=0).sink_to_list()
for i in range(4):
source.emit((i % 2, i))
assert L == [((0, 0), (0, 2)), ((1, 1), (1, 3))]
def test_partition_key_callable():
source = Stream()
L = source.partition(2, key=lambda x: x % 2).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [(0, 2), (1, 3), (4, 6), (5, 7)]
def test_partition_size_one():
source = Stream()
source.partition(1, timeout=.01).sink(lambda x: None)
for i in range(10):
source.emit(i)
def test_sliding_window():
source = Stream()
L = source.sliding_window(2).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [(0, ), (0, 1), (1, 2), (2, 3), (3, 4), (4, 5),
(5, 6), (6, 7), (7, 8), (8, 9)]
L = source.sliding_window(2, return_partial=False).sink_to_list()
for i in range(10):
source.emit(i)
assert L == [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5),
(5, 6), (6, 7), (7, 8), (8, 9)]
def test_sliding_window_ref_counts():
source = Stream()
_ = source.sliding_window(2)
r_prev = RefCounter()
source.emit(-2)
source.emit(-1, metadata=[{'ref': r_prev}])
for i in range(10):
r = RefCounter()
assert r_prev.count == 1
source.emit(i, metadata=[{'ref': r}])
assert r_prev.count == 0
assert r.count == 1
r_prev = r
def test_sliding_window_metadata():
source = Stream()
L = metadata(source.sliding_window(2)).sink_to_list()
source.emit(0)
source.emit(1, metadata=[{'v': 1}])
source.emit(2, metadata=[{'v': 2}])
source.emit(3, metadata=[{'v': 3}])
assert L == [
[{'v': 1}], # First emit, because 0 has no metadata
[{'v': 1}, {'v': 2}], # Second emit
[{'v': 2}, {'v': 3}] # Third emit
]
@gen_test()
def test_backpressure():
q = Queue(maxsize=2)
source = Stream(asynchronous=True)
source.map(inc).scan(add, start=0).sink(q.put)
@gen.coroutine
def read_from_q():
while True:
yield q.get()
yield gen.sleep(0.1)
IOLoop.current().add_callback(read_from_q)
start = time()
for i in range(5):
yield source.emit(i)
end = time()
assert end - start >= 0.2
@gen_test()
def test_timed_window():
source = Stream(asynchronous=True)
a = source.timed_window(0.01)
assert a.loop is IOLoop.current()
L = a.sink_to_list()
for i in range(10):
yield source.emit(i)
yield gen.sleep(0.004)
yield gen.sleep(a.interval)
assert L
assert sum(L, []) == list(range(10))
assert all(len(x) <= 3 for x in L)
assert any(len(x) >= 2 for x in L)
yield gen.sleep(0.1)
assert not L[-1]
@gen_test()
def test_timed_window_ref_counts():
source = Stream()
_ = source.timed_window(0.01)
ref1 = RefCounter()
source.emit(1, metadata=[{'ref': ref1}])
assert ref1.count == 1
yield gen.sleep(0.05)
ref2 = RefCounter()
source.emit(2, metadata=[{'ref': ref2}])
assert ref1.count == 0
assert ref2.count == 1
@gen_test()
def test_timed_window_metadata():
source = Stream()
L = metadata(source.timed_window(0.01)).sink_to_list()
source.emit(0)
source.emit(1, metadata=[{'v': 1}])
yield gen.sleep(0.1)
source.emit(2, metadata=[{'v': 2}])
source.emit(3, metadata=[{'v': 3}])
yield gen.sleep(0.1)
assert L == [
[{'v': 1}], # first emit because 0 has no metadata
[{'v': 2}, {'v': 3}] # second emit
]
def test_timed_window_timedelta(clean): # noqa: F811
pytest.importorskip('pandas')
source = Stream(asynchronous=True)
a = source.timed_window('10ms')
assert a.interval == 0.010
@gen_test()
def test_timed_window_backpressure():
q = Queue(maxsize=1)
source = Stream(asynchronous=True)
source.timed_window(0.01).sink(q.put)
@gen.coroutine
def read_from_q():
while True:
yield q.get()
yield gen.sleep(0.1)
IOLoop.current().add_callback(read_from_q)
start = time()
for i in range(5):
yield source.emit(i)
yield gen.sleep(0.01)
stop = time()
assert stop - start > 0.2
def test_sink_to_file():
with tmpfile() as fn:
source = Stream()
with sink_to_file(fn, source) as f:
source.emit('a')
source.emit('b')
with open(fn) as f:
data = f.read()
assert data == 'a\nb\n'
def test_sink_with_args_and_kwargs():
L = dict()
def mycustomsink(elem, key, prefix=""):
key = prefix + key
if key not in L:
L[key] = list()
L[key].append(elem)
s = Stream()
s.sink(mycustomsink, "cat", "super")
s.emit(1)
s.emit(2)
assert L['supercat'] == [1, 2]
@gen_test()
def test_counter():
counter = itertools.count()
source = PeriodicCallback(lambda: next(counter), 0.001, asynchronous=True)
L = source.sink_to_list()
yield gen.sleep(0.05)
assert L
@gen_test()
def test_rate_limit():
source = Stream(asynchronous=True)
L = source.rate_limit(0.05).sink_to_list()
start = time()
for i in range(5):
yield source.emit(i)
stop = time()
assert stop - start > 0.2
assert len(L) == 5
@gen_test()
def test_delay():
source = Stream(asynchronous=True)
L = source.delay(0.02).sink_to_list()
for i in range(5):
yield source.emit(i)
assert not L
yield gen.sleep(0.04)
assert len(L) < 5
yield gen.sleep(0.1)
assert len(L) == 5
@gen_test()
def test_delay_ref_counts():
source = Stream(asynchronous=True)
_ = source.delay(0.01)
refs = []
for i in range(5):
r = RefCounter()
refs.append(r)
source.emit(i, metadata=[{'ref': r}])
assert all(r.count == 1 for r in refs)
yield gen.sleep(0.05)
assert all(r.count == 0 for r in refs)
@gen_test()
def test_buffer():
source = Stream(asynchronous=True)
L = source.map(inc).buffer(10).map(inc).rate_limit(0.05).sink_to_list()
start = time()
for i in range(10):
yield source.emit(i)
stop = time()
assert stop - start < 0.01
assert not L
start = time()
for i in range(5):
yield source.emit(i)
stop = time()
assert L
assert stop - start > 0.04
@gen_test()
def test_buffer_ref_counts():
source = Stream(asynchronous=True)
_ = source.buffer(5)
refs = []
for i in range(5):
r = RefCounter()
refs.append(r)
source.emit(i, metadata=[{'ref': r}])
assert all(r.count == 1 for r in refs)
yield gen.sleep(0.05)
assert all(r.count == 0 for r in refs)
def test_zip():
a = Stream()
b = Stream()
c = sz.zip(a, b)
L = c.sink_to_list()
a.emit(1)
b.emit('a')
a.emit(2)
b.emit('b')
assert L == [(1, 'a'), (2, 'b')]
d = Stream()
# test zip from the object itself
# zip 3 streams together
e = a.zip(b, d)
L2 = e.sink_to_list()
a.emit(1)
b.emit(2)
d.emit(3)
assert L2 == [(1, 2, 3)]
def test_zip_literals():
a = Stream()
b = Stream()
c = sz.zip(a, 123, b)
L = c.sink_to_list()
a.emit(1)
b.emit(2)
assert L == [(1, 123, 2)]
a.emit(4)
b.emit(5)
assert L == [(1, 123, 2),
(4, 123, 5)]
def test_zip_same():
a = Stream()
b = a.zip(a)
L = b.sink_to_list()
a.emit(1)
a.emit(2)
assert L == [(1, 1), (2, 2)]
def test_combine_latest():
a = Stream()
b = Stream()
c = a.combine_latest(b)
d = a.combine_latest(b, emit_on=[a, b])
L = c.sink_to_list()
L2 = d.sink_to_list()
a.emit(1)
a.emit(2)
b.emit('a')
a.emit(3)
b.emit('b')
assert L == [(2, 'a'), (3, 'a'), (3, 'b')]
assert L2 == [(2, 'a'), (3, 'a'), (3, 'b')]
def test_combine_latest_emit_on():
a = Stream()
b = Stream()
c = a.combine_latest(b, emit_on=a)
L = c.sink_to_list()
a.emit(1)
b.emit('a')
a.emit(2)
a.emit(3)
b.emit('b')
a.emit(4)
assert L == [(2, 'a'), (3, 'a'), (4, 'b')]
def test_combine_latest_emit_on_stream():
a = Stream()
b = Stream()
c = a.combine_latest(b, emit_on=0)
L = c.sink_to_list()
a.emit(1)
b.emit('a')
a.emit(2)
a.emit(3)
b.emit('b')
a.emit(4)
assert L == [(2, 'a'), (3, 'a'), (4, 'b')]
def test_combine_latest_ref_counts():
a = Stream()
b = Stream()
_ = a.combine_latest(b)
ref1 = RefCounter()
a.emit(1, metadata=[{'ref': ref1}])
assert ref1.count == 1
# The new value kicks out the old value
ref2 = RefCounter()
a.emit(2, metadata=[{'ref': ref2}])
assert ref1.count == 0
assert ref2.count == 1
# The value on stream a is still retained and the value on stream b is new
ref3 = RefCounter()
b.emit(3, metadata=[{'ref': ref3}])
assert ref2.count == 1
assert ref3.count == 1
def test_combine_latest_metadata():
a = Stream()
b = Stream()
L = metadata(a.combine_latest(b)).sink_to_list()
a.emit(1, metadata=[{'v': 1}])
b.emit(2, metadata=[{'v': 2}])
b.emit(3)
b.emit(4, metadata=[{'v': 4}])
assert L == [
[{'v': 1}, {'v': 2}], # first emit when 2 is introduced
[{'v': 1}], # 3 has no metadata but it replaces the value on 'b'
[{'v': 1}, {'v': 4}] # 4 replaces the value without metadata on 'b'
]
@gen_test()
def test_zip_timeout():
a = Stream(asynchronous=True)
b = Stream(asynchronous=True)
c = sz.zip(a, b, maxsize=2)
L = c.sink_to_list()
a.emit(1)
a.emit(2)
future = a.emit(3)
with pytest.raises(gen.TimeoutError):
yield gen.with_timeout(timedelta(seconds=0.01), future)
b.emit('a')
yield future
assert L == [(1, 'a')]
def test_zip_ref_counts():
a = Stream()
b = Stream()
_ = a.zip(b)
# The first value in a becomes buffered
ref1 = RefCounter()
a.emit(1, metadata=[{'ref': ref1}])
assert ref1.count == 1
# The second value in a also becomes buffered
ref2 = RefCounter()
a.emit(2, metadata=[{'ref': ref2}])
assert ref1.count == 1
assert ref2.count == 1
# All emitted values are removed from the buffer
ref3 = RefCounter()
b.emit(3, metadata=[{'ref': ref3}])
assert ref1.count == 0
assert ref2.count == 1 # still in the buffer
assert ref3.count == 0
def test_zip_metadata():
a = Stream()
b = Stream()
L = metadata(a.zip(b)).sink_to_list()
a.emit(1, metadata=[{'v': 1}])
b.emit(2, metadata=[{'v': 2}])
a.emit(3)
b.emit(4, metadata=[{'v': 4}])
assert L == [
[{'v': 1}, {'v': 2}], # first emit when 2 is introduced
[{'v': 4}] # second emit when 4 is introduced, and 3 has no metadata
]
def test_frequencies():
source = Stream()
L = source.frequencies().sink_to_list()
source.emit('a')
source.emit('b')
source.emit('a')
assert L[-1] == {'a': 2, 'b': 1}
def test_flatten():
source = Stream()
L = source.flatten().sink_to_list()
source.emit([1, 2, 3])
source.emit([4, 5])
source.emit([6, 7, 8])
assert L == [1, 2, 3, 4, 5, 6, 7, 8]
def test_unique():
source = Stream()
L = source.unique().sink_to_list()
source.emit(1)
source.emit(2)
source.emit(1)
assert L == [1, 2]
def test_unique_key():
source = Stream()
L = source.unique(key=lambda x: x % 2, maxsize=1).sink_to_list()
source.emit(1)
source.emit(2)
source.emit(4)
source.emit(6)
source.emit(3)
assert L == [1, 2, 3]
def test_unique_metadata():
source = Stream()
L = metadata(source.unique()).flatten().sink_to_list()
for i in range(5):
source.emit(i, metadata=[{'v': i}])
assert L == [{'v': i} for i in range(5)]
def test_unique_history():
source = Stream()
s = source.unique(maxsize=2)
s2 = source.unique(maxsize=2, hashable=False)
L = s.sink_to_list()
L2 = s2.sink_to_list()
source.emit(1)
source.emit(2)
source.emit(1)
source.emit(2)
source.emit(1)
source.emit(2)
assert L == [1, 2]
assert L == L2
source.emit(3)
source.emit(2)
assert L == [1, 2, 3]
assert L == L2
source.emit(1)
assert L == [1, 2, 3, 1]
assert L == L2
# update 2 position
source.emit(2)
# knock out 1
source.emit(3)
# update 2 position
source.emit(2)
assert L == [1, 2, 3, 1, 3]
assert L == L2
def test_unique_history_dict():
source = Stream()
s = source.unique(maxsize=2, hashable=False)
L = s.sink_to_list()
a = {'hi': 'world'}
b = {'hi': 'bar'}
c = {'foo': 'bar'}
source.emit(a)
source.emit(b)
source.emit(a)
source.emit(b)
source.emit(a)
source.emit(b)
assert L == [a, b]
source.emit(c)
source.emit(b)
assert L == [a, b, c]
source.emit(a)
assert L == [a, b, c, a]
def test_union():
a = Stream()
b = Stream()
c = Stream()
L = a.union(b, c).sink_to_list()
a.emit(1)
assert L == [1]
b.emit(2)
assert L == [1, 2]
a.emit(3)
assert L == [1, 2, 3]
c.emit(4)
assert L == [1, 2, 3, 4]
def test_pluck():
a = Stream()
L = a.pluck(1).sink_to_list()
a.emit([1, 2, 3])
assert L == [2]
a.emit([4, 5, 6, 7, 8, 9])
assert L == [2, 5]
with pytest.raises(IndexError):
a.emit([1])
def test_pluck_list():
a = Stream()
L = a.pluck([0, 2]).sink_to_list()
a.emit([1, 2, 3])
assert L == [(1, 3)]
a.emit([4, 5, 6, 7, 8, 9])
assert L == [(1, 3), (4, 6)]
with pytest.raises(IndexError):
a.emit([1])
def test_collect():
source1 = Stream()
source2 = Stream()
collector = source1.collect()
L = collector.sink_to_list()
source2.sink(collector.flush)
source1.emit(1)
source1.emit(2)
assert L == []
source2.emit('anything') # flushes collector
assert L == [(1, 2)]
source2.emit('anything')
assert L == [(1, 2), ()]
source1.emit(3)
assert L == [(1, 2), ()]
source2.emit('anything')
assert L == [(1, 2), (), (3,)]
def test_collect_ref_counts():
source = Stream()
collector = source.collect()
refs = []
for i in range(10):
r = RefCounter()
refs.append(r)
source.emit(i, metadata=[{'ref': r}])
assert all(r.count == 1 for r in refs)
collector.flush()
assert all(r.count == 0 for r in refs)
def test_collect_metadata():
source = Stream()
collector = source.collect()
L = metadata(collector).sink_to_list()
source.emit(0)
source.emit(1, metadata=[{'v': 1}])
source.emit(2, metadata=[{'v': 2}])
collector.flush()
source.emit(3, metadata=[{'v': 3}])
source.emit(4, metadata=[{'v': 4}])
collector.flush()
assert L == [
[{'v': 1}, {'v': 2}], # Flush 0-2, but 0 has no metadata
[{'v': 3}, {'v': 4}] # Flush the rest
]
def test_map_str():
def add(x=0, y=0):
return x + y
source = Stream()
s = source.map(add, y=10)
assert str(s) == '<map: add>'
def test_filter_str():
def iseven(x):
return x % 2 == 0
source = Stream()
s = source.filter(iseven)
assert str(s) == '<filter: iseven>'
def test_timed_window_str(clean): # noqa: F811
source = Stream()
s = source.timed_window(.05)
assert str(s) == '<timed_window: 0.05>'
def test_partition_str():
source = Stream()
s = source.partition(2)
assert str(s) == '<partition: 2>'
def test_partition_ref_counts():
source = Stream()
_ = source.partition(2)
for i in range(10):
r = RefCounter()
source.emit(i, metadata=[{'ref': r}])
if i % 2 == 0:
assert r.count == 1
else:
assert r.count == 0
def test_partition_metadata():
source = Stream()
L = metadata(source.partition(2)).sink_to_list()
source.emit(0)
source.emit(1, metadata=[{'v': 1}])
source.emit(2, metadata=[{'v': 2}])
source.emit(3, metadata=[{'v': 3}])
assert L == [
[{'v': 1}], # first emit when 1 is introduced. 0 has no metadata
[{'v': 2}, {'v': 3}] # second emit
]
def test_stream_name_str():
source = Stream(stream_name='this is not a stream')
assert str(source) == '<this is not a stream; Stream>'
def test_zip_latest():
a = Stream()
b = Stream()
c = a.zip_latest(b)
d = a.combine_latest(b, emit_on=a)
L = c.sink_to_list()
L2 = d.sink_to_list()
a.emit(1)
a.emit(2)
b.emit('a')
b.emit('b')
a.emit(3)
assert L == [(1, 'a'), (2, 'a'), (3, 'b')]
assert L2 == [(3, 'b')]
def test_zip_latest_reverse():
a = Stream()
b = Stream()
c = a.zip_latest(b)
L = c.sink_to_list()
b.emit('a')
a.emit(1)
a.emit(2)
a.emit(3)
b.emit('b')
a.emit(4)
assert L == [(1, 'a'), (2, 'a'), (3, 'a'), (4, 'b')]
def test_triple_zip_latest():
from streamz.core import Stream
s1 = Stream()
s2 = Stream()
s3 = Stream()
s_simple = s1.zip_latest(s2, s3)
L_simple = s_simple.sink_to_list()
s1.emit(1)
s2.emit('I')
s2.emit("II")
s1.emit(2)
s2.emit("III")
s3.emit('a')
s3.emit('b')
s1.emit(3)
assert L_simple == [(1, 'III', 'a'), (2, 'III', 'a'), (3, 'III', 'b')]
def test_zip_latest_ref_counts():
a = Stream()
b = Stream()
_ = a.zip_latest(b)
ref1 = RefCounter()
a.emit(1, metadata=[{'ref': ref1}])
assert ref1.count == 1 # Retained until stream b has a value
# The lossless stream is never retained if all upstreams have a value
ref2 = RefCounter()
b.emit(2, metadata=[{'ref': ref2}])
assert ref1.count == 0
assert ref2.count == 1
# Kick out the stream b value and verify it has zero references
ref3 = RefCounter()
b.emit(3, metadata=[{'ref': ref3}])
assert ref2.count == 0
assert ref3.count == 1
# Verify the lossless value is not retained, but the lossy value is
ref4 = RefCounter()
a.emit(3, metadata=[{'ref': ref4}])
assert ref3.count == 1
assert ref4.count == 0
def test_zip_latest_metadata():
a = Stream()
b = Stream()
L = metadata(a.zip_latest(b)).sink_to_list()
a.emit(1, metadata=[{'v': 1}])
b.emit(2, metadata=[{'v': 2}])
a.emit(3)
b.emit(4, metadata=[{'v': 4}])
assert L == [
[{'v': 1}, {'v': 2}], # the first emit when 2 is introduced
[{'v': 2}] # 3 has no metadata
]
def test_connect():
source_downstream = Stream()
# connect assumes this default behaviour
# of stream initialization
assert not source_downstream.downstreams
assert source_downstream.upstreams == [None]
# initialize the second stream to connect to
source_upstream = Stream()
sout = source_downstream.map(lambda x : x + 1)
L = list()
sout = sout.map(L.append)
source_upstream.connect(source_downstream)
source_upstream.emit(2)
source_upstream.emit(4)
assert L == [3, 5]
def test_multi_connect():
source0 = Stream()
source1 = Stream()
source_downstream = source0.union(source1)
# connect assumes this default behaviour
# of stream initialization
assert not source_downstream.downstreams
# initialize the second stream to connect to
source_upstream = Stream()
sout = source_downstream.map(lambda x : x + 1)
L = list()
sout = sout.map(L.append)
source_upstream.connect(source_downstream)
source_upstream.emit(2)
source_upstream.emit(4)
assert L == [3, 5]
def test_disconnect():
source = Stream()
upstream = Stream()
L = upstream.sink_to_list()
source.emit(1)
assert L == []
source.connect(upstream)
source.emit(2)
source.emit(3)
assert L == [2, 3]
source.disconnect(upstream)
source.emit(4)
assert L == [2, 3]
def test_gc():
source = Stream()
L = []
a = source.map(L.append)
source.emit(1)
assert L == [1]
del a
import gc; gc.collect()
start = time()
while source.downstreams:
sleep(0.01)
assert time() < start + 1
source.emit(2)
assert L == [1]
@gen_test()
def test_from_file():
with tmpfile() as fn:
with open(fn, 'wt') as f:
f.write('{"x": 1, "y": 2}\n')
f.write('{"x": 2, "y": 2}\n')
f.write('{"x": 3, "y": 2}\n')
f.flush()
source = Stream.from_textfile(fn, poll_interval=0.010,
asynchronous=True, start=False)
L = source.map(json.loads).pluck('x').sink_to_list()
assert L == []
source.start()
yield await_for(lambda: len(L) == 3, timeout=5)
assert L == [1, 2, 3]
f.write('{"x": 4, "y": 2}\n')
f.write('{"x": 5, "y": 2}\n')
f.flush()
start = time()
while L != [1, 2, 3, 4, 5]:
yield gen.sleep(0.01)
assert time() < start + 2 # reads within 2s
@gen_test()
def test_from_file_end():
with tmpfile() as fn:
with open(fn, 'wt') as f:
f.write('data1\n')
f.flush()
source = Stream.from_textfile(fn, poll_interval=0.010,
start=False, from_end=True)
out = source.sink_to_list()
source.start()
assert out == []
yield await_for(lambda: source.started, 2, period=0.02)
f.write('data2\n')
f.flush()
yield await_for(lambda: out == ['data2\n'], timeout=5, period=0.1)
@gen_test()
def test_filenames():
with tmpfile() as fn:
os.mkdir(fn)
with open(os.path.join(fn, 'a'), 'w'):
pass
with open(os.path.join(fn, 'b'), 'w'):
pass
source = Stream.filenames(fn, asynchronous=True)
L = source.sink_to_list()
source.start()
while len(L) < 2:
yield gen.sleep(0.01)
assert L == [os.path.join(fn, x) for x in ['a', 'b']]
with open(os.path.join(fn, 'c'), 'w'):
pass
while len(L) < 3:
yield gen.sleep(0.01)
assert L == [os.path.join(fn, x) for x in ['a', 'b', 'c']]
def test_docstrings():
for s in [Stream, Stream()]:
assert 'every element' in s.map.__doc__
assert s.map.__name__ == 'map'
assert 'predicate' in s.filter.__doc__
assert s.filter.__name__ == 'filter'
def test_subclass():
class NewStream(Stream):
pass
@NewStream.register_api()
class foo(NewStream):
pass
assert hasattr(NewStream, 'map')
assert hasattr(NewStream(), 'map')
assert hasattr(NewStream, 'foo')
assert hasattr(NewStream(), 'foo')
assert not hasattr(Stream, 'foo')
assert not hasattr(Stream(), 'foo')
@gen_test()
def test_latest():
source = Stream(asynchronous=True)
L = []
@gen.coroutine
def slow_write(x):
yield gen.sleep(0.050)
L.append(x)
s = source.map(inc).latest().map(slow_write) # noqa: F841
source.emit(1)
yield gen.sleep(0.010)
source.emit(2)
source.emit(3)
start = time()
while len(L) < 2:
yield gen.sleep(0.01)
assert time() < start + 3
assert L == [2, 4]
yield gen.sleep(0.060)
assert L == [2, 4]
def test_latest_ref_counts():
source = Stream()
_ = source.latest()
ref1 = RefCounter()
source.emit(1, metadata=[{'ref': ref1}])
assert ref1.count == 1
ref2 = RefCounter()
source.emit(2, metadata=[{'ref': ref2}])
assert ref1.count == 0
assert ref2.count == 1
def test_destroy():
source = Stream()
s = source.map(inc)
L = s.sink_to_list()
source.emit(1)
assert L == [2]
s.destroy()
assert not list(source.downstreams)
assert not s.upstreams
source.emit(2)
assert L == [2]
def dont_test_stream_kwargs(clean): # noqa: F811
''' Test the good and bad kwargs for the stream
Currently just stream_name
'''
test_name = "some test name"
sin = Stream(stream_name=test_name)
sin2 = Stream()
assert sin.name == test_name
# when not defined, should be None
assert sin2.name is None
# add new core methods here, initialized
# these should be functions, use partial to partially initialize them
# (if they require more arguments)
streams = [
# some filter kwargs, so we comment them out
partial(sin.map, lambda x : x),
partial(sin.accumulate, lambda x1, x2 : x1),
partial(sin.filter, lambda x : True),
partial(sin.partition, 2),
partial(sin.sliding_window, 2),
partial(sin.timed_window, .01),
partial(sin.rate_limit, .01),
partial(sin.delay, .02),
partial(sin.buffer, 2),
partial(sin.zip, sin2),
partial(sin.combine_latest, sin2),
sin.frequencies,
sin.flatten,
sin.unique,
sin.union,
partial(sin.pluck, 0),
sin.collect,
]
good_kwargs = dict(stream_name=test_name)
bad_kwargs = dict(foo="bar")
for s in streams:
# try good kwargs
sout = s(**good_kwargs)
assert sout.name == test_name
del sout
with pytest.raises(TypeError):
sout = s(**bad_kwargs)
sin.emit(1)
# need a second emit for accumulate
sin.emit(1)
del sout
# verify that sout is properly deleted each time by emitting once into sin
# and not getting TypeError
# garbage collect and then try
import gc
gc.collect()
sin.emit(1)
@pytest.fixture
def thread(loop): # noqa: F811
from threading import Thread, Event
thread = Thread(target=loop.start)
thread.daemon = True
thread.start()
event = Event()
loop.add_callback(event.set)
event.wait()
return thread
def test_percolate_loop_information(clean): # noqa: F811
source = Stream()
assert not source.loop
s = source.timed_window(0.5)
assert source.loop is s.loop
def test_separate_thread_without_time(loop, thread): # noqa: F811
assert thread.is_alive()
source = Stream(loop=loop)
L = source.map(inc).sink_to_list()
for i in range(10):
source.emit(i)
assert L[-1] == i + 1
def test_separate_thread_with_time(clean): # noqa: F811
L = []
@gen.coroutine
def slow_write(x):
yield gen.sleep(0.1)
L.append(x)
source = Stream(asynchronous=False)
source.map(inc).sink(slow_write)
start = time()
source.emit(1)
stop = time()
assert stop - start > 0.1
assert L == [2]
def test_execution_order():
L = []
for i in range(5):
s = Stream()
b = s.pluck(1)
a = s.pluck(0)
li = a.combine_latest(b, emit_on=a).sink_to_list()
z = [(1, 'red'), (2, 'blue'), (3, 'green')]
for zz in z:
s.emit(zz)
L.append((li, ))
for ll in L:
assert ll == L[0]
L2 = []
for i in range(5):
s = Stream()
a = s.pluck(0)
b = s.pluck(1)
li = a.combine_latest(b, emit_on=a).sink_to_list()
z = [(1, 'red'), (2, 'blue'), (3, 'green')]
for zz in z:
s.emit(zz)
L2.append((li,))
for ll, ll2 in zip(L, L2):
assert ll2 == L2[0]
assert ll != ll2
@gen_test()
def test_map_errors_log():
a = Stream(asynchronous=True)
b = a.delay(0.001).map(lambda x: 1 / x) # noqa: F841
with captured_logger('streamz') as logger:
a._emit(0)
yield gen.sleep(0.1)
out = logger.getvalue()
assert 'ZeroDivisionError' in out
def test_map_errors_raises():
a = Stream()
b = a.map(lambda x: 1 / x) # noqa: F841
with pytest.raises(ZeroDivisionError):
a.emit(0)
@gen_test()
def test_accumulate_errors_log():
a = Stream(asynchronous=True)
b = a.delay(0.001).accumulate(lambda x, y: x / y, with_state=True) # noqa: F841
with captured_logger('streamz') as logger:
a._emit(1)
a._emit(0)
yield gen.sleep(0.1)
out = logger.getvalue()
assert 'ZeroDivisionError' in out
def test_accumulate_errors_raises():
a = Stream()
b = a.accumulate(lambda x, y: x / y, with_state=True) # noqa: F841
with pytest.raises(ZeroDivisionError):
a.emit(1)
a.emit(0)
@gen_test()
def test_sync_in_event_loop():
a = Stream()
assert not a.asynchronous
L = a.timed_window(0.01).sink_to_list()
sleep(0.05)
assert L
assert a.loop
assert a.loop is not IOLoop.current()
def test_share_common_ioloop(clean): # noqa: F811
a = Stream()
b = Stream()
aa = a.timed_window(0.01)
bb = b.timed_window(0.01)
assert aa.loop is bb.loop
@pytest.mark.parametrize('data', [
[[], [0, 1, 2, 3, 4, 5]],
[[None, None, None], [0, 1, 2, 3, 4, 5]],
[[1, None, None], [1, 2, 3, 4, 5]],
[[None, 4, None], [0, 1, 2, 3]],
[[None, 4, 2], [0, 2]],
[[3, 1, None], []]
])
def test_slice(data):
pars, expected = data
a = Stream()
b = a.slice(*pars)
out = b.sink_to_list()
for i in range(6):
a.emit(i)
assert out == expected
def test_slice_err():
a = Stream()
with pytest.raises(ValueError):
a.slice(end=-1)
def test_start():
flag = []
class MySource(Stream):
def start(self):
flag.append(True)
s = MySource().map(inc)
s.start()
assert flag == [True]
def test_connect_zip():
a = Stream()
b = Stream()
c = Stream()
x = a.zip(b)
L = x.sink_to_list()
c.connect(x)
a.emit(1)
b.emit(1)
assert not L
c.emit(1)
assert L == [(1, 1, 1)]
def test_disconnect_zip():
a = Stream()
b = Stream()
c = Stream()
x = a.zip(b, c)
L = x.sink_to_list()
b.disconnect(x)
a.emit(1)
b.emit(1)
assert not L
c.emit(1)
assert L == [(1, 1)]
def test_connect_combine_latest():
a = Stream()
b = Stream()
c = Stream()
x = a.combine_latest(b, emit_on=a)
L = x.sink_to_list()
c.connect(x)
b.emit(1)
c.emit(1)
a.emit(1)
assert L == [(1, 1, 1)]
def test_connect_discombine_latest():
a = Stream()
b = Stream()
c = Stream()
x = a.combine_latest(b, c, emit_on=a)
L = x.sink_to_list()
c.disconnect(x)
b.emit(1)
c.emit(1)
a.emit(1)
assert L == [(1, 1)]
if sys.version_info >= (3, 5):
from streamz.tests.py3_test_core import * # noqa
def test_buffer_after_partition():
Stream().partition(1).buffer(1)
def test_buffer_after_timed_window():
Stream().timed_window(1).buffer(1)
def test_buffer_after_sliding_window():
Stream().sliding_window(1).buffer(1)
def test_backpressure_connect_empty_stream():
@Stream.register_api()
class from_list(Stream):
def __init__(self, source, **kwargs):
self.source = source
super().__init__(ensure_io_loop=True, **kwargs)
def start(self):
self.stopped = False
self.loop.add_callback(self.run)
@gen.coroutine
def run(self):
while not self.stopped and len(self.source) > 0:
yield self._emit(self.source.pop(0))
source_list = [0, 1, 2, 3, 4]
source = Stream.from_list(source_list)
sout = Stream()
L = sout.rate_limit(1).sink_to_list()
source.connect(sout)
source.start()
wait_for(lambda: L == [0], 0.01)
assert len(source_list) > 0
| mrocklin/streams | streamz/tests/test_core.py | Python | bsd-3-clause | 35,537 |
# coding: utf-8
from __future__ import unicode_literals, division, print_function
import os
from pymatgen.util.testing import PymatgenTest
from pymatgen.core.structure import Structure
from pymatgen.core.units import Ha_to_eV
from pymatgen.io.abinitio.abiobjects import *
import warnings
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
'test_files')
def cif_paths():
cifpaths = []
print(test_dir)
for fname in os.listdir(test_dir):
fname = os.path.join(test_dir, fname)
if os.path.isfile(fname) and fname.endswith(".cif"):
cifpaths.append(fname)
assert cifpaths
return cifpaths
class SpinModeTest(PymatgenTest):
def test_base(self):
polarized = SpinMode.as_spinmode("polarized")
other_polarized = SpinMode.as_spinmode("polarized")
unpolarized = SpinMode.as_spinmode("unpolarized")
polarized.to_abivars()
self.assertTrue(polarized is other_polarized)
self.assertTrue(polarized == other_polarized)
self.assertTrue(polarized != unpolarized)
# Test pickle
self.serialize_with_pickle(polarized)
class SmearingTest(PymatgenTest):
def test_base(self):
fd1ev = Smearing.as_smearing("fermi_dirac:1 eV")
print(fd1ev)
fd1ev.to_abivars()
self.assertTrue(fd1ev)
same_fd = Smearing.as_smearing("fermi_dirac:"+ str(1.0/Ha_to_eV))
self.assertTrue(same_fd == fd1ev)
nosmear = Smearing.nosmearing()
self.assertFalse(nosmear)
self.assertTrue(nosmear != fd1ev)
new_fd1ev = Smearing.from_dict(fd1ev.as_dict())
self.assertTrue(new_fd1ev == fd1ev)
# Test pickle
self.serialize_with_pickle(fd1ev)
class ElectronsAlgorithmTest(PymatgenTest):
def test_base(self):
algo = ElectronsAlgorithm(nstep=70)
print(algo.to_abivars())
# Test pickle
self.serialize_with_pickle(algo)
class ElectronsTest(PymatgenTest):
def test_base(self):
default_electrons = Electrons()
self.assertTrue(default_electrons.nsppol==2)
self.assertTrue(default_electrons.nspinor==1)
self.assertTrue(default_electrons.nspden==2)
print(default_electrons.to_abivars())
#new = Electron.from_dict(default_electrons.as_dict())
# Test pickle
self.serialize_with_pickle(default_electrons, test_eq=False)
class AbiStructureTest(PymatgenTest):
def setUp(self):
self.cif_paths = cif_paths()
def test_asabistructure(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
for cif_path in self.cif_paths:
print("about to init abistructure from %s " % cif_path)
st = asabistructure(cif_path)
self.assertTrue(st is asabistructure(st))
self.assertTrue(isinstance(st, Structure))
# TODO
if not st.is_ordered:
print("Unordered structures are not supported")
continue
print(st.to_abivars())
# Test pickle
# FIXME: protocol 2 does not work due to __new__
self.serialize_with_pickle(st, protocols=[0, 1], test_eq=True)
#class KSamplingTest(PymatgenTest):
#class RelaxationTest(PymatgenTest):
class PPModelTest(PymatgenTest):
def test_base(self):
godby = PPModel.as_ppmodel("godby:12 eV")
print(godby)
print(repr(godby))
godby.to_abivars()
self.assertTrue(godby)
same_godby = PPModel.as_ppmodel("godby:"+ str(12.0/Ha_to_eV))
self.assertTrue(same_godby == godby)
noppm = PPModel.noppmodel()
self.assertFalse(noppm)
self.assertTrue(noppm != godby)
new_godby = PPModel.from_dict(godby.as_dict())
self.assertTrue(new_godby == godby)
# Test pickle
self.serialize_with_pickle(godby)
if __name__ == '__main__':
import unittest
unittest.main()
| yanikou19/pymatgen | pymatgen/io/abinitio/tests/test_abiobjects.py | Python | mit | 4,064 |
"""
Example producer that sends a single message and exits.
You can use `complete_receive.py` to receive the message sent.
"""
from __future__ import absolute_import, unicode_literals
from kombu import Connection, Producer, Exchange, Queue
#: By default messages sent to exchanges are persistent (delivery_mode=2),
#: and queues and exchanges are durable.
exchange = Exchange('kombu_demo', type='direct')
queue = Queue('kombu_demo', exchange, routing_key='kombu_demo')
with Connection('amqp://guest:guest@localhost:5672//') as connection:
    #: Producers are used to publish messages.
    #: A default exchange and routing key can also be specified
    #: as arguments to the Producer, but we'd rather specify them explicitly
    #: at the publish call.
producer = Producer(connection)
#: Publish the message using the json serializer (which is the default),
#: and zlib compression. The kombu consumer will automatically detect
#: encoding, serialization and compression used and decode accordingly.
producer.publish({'hello': 'world'},
exchange=exchange,
routing_key='kombu_demo',
serializer='json', compression='zlib')
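# For context, a minimal sketch of the receiving side (hedged: this is an
# illustration, not the actual contents of `complete_receive.py`; the callback
# name and the reuse of `queue` from above are assumptions):
#
#   from kombu import Connection, Consumer
#
#   def handle_message(body, message):
#       print(body)
#       message.ack()
#
#   with Connection('amqp://guest:guest@localhost:5672//') as connection:
#       with Consumer(connection, queues=[queue], callbacks=[handle_message]):
#           connection.drain_events(timeout=1)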
| urbn/kombu | examples/complete_send.py | Python | bsd-3-clause | 1,211 |
#-------------------------------------------------------------------------------
# Copyright 2017 Cognizant Technology Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#-------------------------------------------------------------------------------
'''
Created on Jun 28, 2016
@author: 463188
'''
import urllib.request, urllib.error, urllib.parse
import xmltodict
import json
import base64
from ....core.BaseAgent3 import BaseAgent
class NexusAgent(BaseAgent):
@BaseAgent.timed
def process(self):
self.userid = self.getCredential("userid")
self.passwd = self.getCredential("passwd")
BaseUrl = self.config.get("baseUrl", '')
FirstEndPoint = self.config.get("firstEndPoint", '')
nexIDs = self.getResponse(FirstEndPoint, 'GET', self.userid, self.passwd, None)
previousname = nexIDs["items"][0]["repository"]
fetchNextPage = True
while fetchNextPage:
for artifacts in range(len(nexIDs["items"])):
if nexIDs["items"][artifacts]["repository"] == previousname and artifacts != 0:
continue
else:
repoid = nexIDs["items"][artifacts]["repository"]
artifactid = nexIDs["items"][artifacts]["name"]
previousname = repoid
groupid = nexIDs["items"][artifacts]["group"].replace(".", "/", 3)
request = urllib.request.Request(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["items"][artifacts]["name"]+"/maven-metadata.xml")
request.add_header('Authorization', 'Basic %s' % self.getBase64Value(self.userid,self.passwd))
mavenmetafile = urllib.request.urlopen(request)#reading base mavenmetadata file to fetch main version
#mavenmetafile = urllib2.urlopen(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["items"][artifacts]["name"]+"/maven-metadata.xml")#reading base mavenmetadata file to fetch main version
mavenmetadata = xmltodict.parse(mavenmetafile.read())
mavenmetafile.close()
lastupdated = mavenmetadata["metadata"]["versioning"]["lastUpdated"]
tracking = self.trackingUpdation(repoid, lastupdated)
self.prepareAndPublish(nexIDs["items"][artifacts], tracking)
continuationToken = nexIDs["continuationToken"]
            if continuationToken is None:
                fetchNextPage = False
            else:
                fetchNextPage = True
newFirstEndPoint = FirstEndPoint+'&continuationToken='+str(continuationToken)
nexIDs = self.getResponse(newFirstEndPoint, 'GET', self.userid, self.passwd, None)
def prepareAndPublish(self, nexIDs, tracking):
repoid = nexIDs["repository"]
artifactid = nexIDs["name"]
groupid = nexIDs["group"].replace(".", "/", 3)
request = urllib.request.Request(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["name"]+"/maven-metadata.xml")
request.add_header('Authorization', 'Basic %s' % self.getBase64Value(self.userid,self.passwd))
mavenmetafile = urllib.request.urlopen(request)#reading base mavenmetadata file to fetch main version
mavenmetadata = xmltodict.parse(mavenmetafile.read())
mavenmetafile.close()
lastupdated = mavenmetadata["metadata"]["versioning"]["lastUpdated"]
if tracking>0:
if tracking == 1:
if isinstance(mavenmetadata["metadata"]["versioning"]["versions"]["version"],list):
for version in mavenmetadata["metadata"]["versioning"]["versions"]["version"]:
self.publishdata(repoid, groupid, nexIDs, version, artifactid, lastupdated)
else:
version = mavenmetadata["metadata"]["versioning"]["versions"]["version"]
self.publishdata(repoid, groupid, nexIDs, version, artifactid, lastupdated)
else:
version = mavenmetadata["metadata"]["versioning"]["versions"]["version"][len(mavenmetadata["metadata"]["versioning"]["versions"]["version"])-1]
self.publishdata(repoid, groupid, nexIDs, version, artifactid, lastupdated)
def publishdata(self, repoid, groupid, nexIDs, version, artifactid, lastupdated):
data = []
print(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["name"]+"/"+version+"/"+nexIDs["name"]+"-"+version+".pom")
request = urllib.request.Request(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["name"]+"/"+version+"/"+nexIDs["name"]+"-"+version+".pom")
request.add_header('Authorization', 'Basic %s' % self.getBase64Value(self.userid,self.passwd))
mainmavenxml = urllib.request.urlopen(request)#reading mavenmetadata file inside main version folder
#mainmavenxml = urllib.request.urlopen(self.config.get("baseUrl", '')+"repository/"+repoid+"/"+groupid+"/"+nexIDs["name"]+"/"+version+"/"+nexIDs["name"]+"-"+version+".pom")#reading mavenmetadata file inside main version folder
mainmavendata = mainmavenxml.read()
mainmavenxml.close()
artifactfullname = artifactid + "-" + version + "." + xmltodict.parse(mainmavendata)["project"]["packaging"]
injectData = {}
injectData["timestamp"] = lastupdated
injectData["version"] = version
injectData["currentID"] = groupid+ "-" + artifactfullname
injectData["resourceKey"] = nexIDs["group"] + ':' + nexIDs["name"]
injectData["Status"] = "Archive"
injectData["Author"] = self.userid
data.append(injectData)
self.publishToolsData(data)
def nexus(self, logResponse):
#print (logResponse)
return
def getBase64Value(self,userid,passwd):
userpass = '%s:%s' % (userid,passwd)
base64string = base64.standard_b64encode(userpass.encode('utf-8'))
return base64string.decode('utf-8')
def trackingUpdation(self, repoid, lastupdated):
self.loadTrackingConfig()
if self.tracking.get(repoid) is None:
self.tracking[repoid] = lastupdated
self.updateTrackingJson(self.tracking)
return 1
else:
if int(self.tracking.get(repoid, None)) < int(lastupdated):
self.tracking[repoid] = lastupdated
self.updateTrackingJson(self.tracking)
return 2
else:
return 0
if __name__ == "__main__":
NexusAgent() | CognizantOneDevOps/Insights | PlatformAgents/com/cognizant/devops/platformagents/agents/artifactmanagement/nexus/NexusAgent3.py | Python | apache-2.0 | 7,233 |
# Topydo - A todo.txt client written in Python.
# Copyright (C) 2014 - 2015 Bram Schoenmakers <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from topydo.lib.printers.PrettyPrinter import PrettyPrinter
from topydo.lib.Todo import Todo
from topydo.lib.TodoFile import TodoFile
from topydo.lib.TodoList import TodoList
def load_file(p_filename):
"""
Loads a todo file from the given filename and returns a list of todos.
"""
todolist = load_file_to_raw_list(p_filename)
return [Todo(src) for src in todolist]
def load_file_to_raw_list(p_filename):
"""
Loads a todo file from the given filename and returns a list of todo
strings (unparsed).
"""
todofile = TodoFile(p_filename)
return todofile.read()
def load_file_to_todolist(p_filename):
"""
Loads a todo file to a TodoList instance.
"""
todolist = load_file_to_raw_list(p_filename)
return TodoList(todolist)
def todolist_to_string(p_list):
""" Converts a todo list to a single string. """
return '\n'.join([t.source() for t in p_list])
def print_view(p_view):
printer = PrettyPrinter()
return "\n".join([str(s) for s in printer.print_list(p_view.todos)])
| bram85/topydo | test/facilities.py | Python | gpl-3.0 | 1,802 |
from __future__ import absolute_import, unicode_literals
from contextlib import contextmanager
from os.path import normpath
from pprint import pformat
import django
from django import http
from django.conf.urls import url
from django.db.models.query import QuerySet, RawQuerySet
from django.template import Context, RequestContext, Template
from django.test.signals import template_rendered
from django.test.utils import instrumented_test_render
from django.utils.encoding import force_text
from django.utils import six
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.compat import (
OrderedDict, get_template_dirs, get_template_context_processors)
from debug_toolbar.panels import Panel
from debug_toolbar.panels.sql.tracking import recording, SQLQueryTriggered
from debug_toolbar.panels.templates import views
# Monkey-patch to enable the template_rendered signal. The receiver returns
# immediately when the panel is disabled to keep the overhead small.
# Code taken and adapted from Simon Willison and Django Snippets:
# http://www.djangosnippets.org/snippets/766/
if Template._render != instrumented_test_render:
Template.original_render = Template._render
Template._render = instrumented_test_render
# Monkey-patch to store items added by template context processors. The
# overhead is sufficiently small to justify enabling it unconditionally.
if django.VERSION[:2] < (1, 8):
def _request_context___init__(
self, request, dict_=None, processors=None, current_app=None,
use_l10n=None, use_tz=None):
Context.__init__(
self, dict_, current_app=current_app,
use_l10n=use_l10n, use_tz=use_tz)
if processors is None:
processors = ()
else:
processors = tuple(processors)
self.context_processors = OrderedDict()
updates = dict()
std_processors = get_template_context_processors()
for processor in std_processors + processors:
name = '%s.%s' % (processor.__module__, processor.__name__)
context = processor(request)
self.context_processors[name] = context
updates.update(context)
self.update(updates)
RequestContext.__init__ = _request_context___init__
else:
@contextmanager
def _request_context_bind_template(self, template):
if self.template is not None:
raise RuntimeError("Context is already bound to a template")
self.template = template
# Set context processors according to the template engine's settings.
processors = (template.engine.template_context_processors +
self._processors)
self.context_processors = OrderedDict()
updates = {}
for processor in processors:
name = '%s.%s' % (processor.__module__, processor.__name__)
context = processor(self.request)
self.context_processors[name] = context
updates.update(context)
self.dicts[self._processors_index] = updates
try:
yield
finally:
self.template = None
# Unset context processors.
self.dicts[self._processors_index] = {}
RequestContext.bind_template = _request_context_bind_template
# Monkey-patch versions of Django where Template doesn't store origin.
# See https://code.djangoproject.com/ticket/16096.
if django.VERSION[:2] < (1, 7):
old_template_init = Template.__init__
def new_template_init(self, template_string, origin=None, name='<Unknown Template>'):
old_template_init(self, template_string, origin, name)
self.origin = origin
Template.__init__ = new_template_init
class TemplatesPanel(Panel):
"""
A panel that lists all templates used during processing of a response.
"""
def __init__(self, *args, **kwargs):
super(TemplatesPanel, self).__init__(*args, **kwargs)
self.templates = []
def _store_template_info(self, sender, **kwargs):
template, context = kwargs['template'], kwargs['context']
# Skip templates that we are generating through the debug toolbar.
if (isinstance(template.name, six.string_types) and
template.name.startswith('debug_toolbar/')):
return
context_list = []
for context_layer in getattr(context, 'dicts', []):
temp_layer = {}
if hasattr(context_layer, 'items'):
for key, value in context_layer.items():
# Replace any request elements - they have a large
# unicode representation and the request data is
# already made available from the Request panel.
if isinstance(value, http.HttpRequest):
temp_layer[key] = '<<request>>'
# Replace the debugging sql_queries element. The SQL
# data is already made available from the SQL panel.
elif key == 'sql_queries' and isinstance(value, list):
temp_layer[key] = '<<sql_queries>>'
# Replace LANGUAGES, which is available in i18n context processor
elif key == 'LANGUAGES' and isinstance(value, tuple):
temp_layer[key] = '<<languages>>'
# QuerySet would trigger the database: user can run the query from SQL Panel
elif isinstance(value, (QuerySet, RawQuerySet)):
model_name = "%s.%s" % (
value.model._meta.app_label, value.model.__name__)
temp_layer[key] = '<<%s of %s>>' % (
value.__class__.__name__.lower(), model_name)
else:
try:
recording(False)
pformat(value) # this MAY trigger a db query
except SQLQueryTriggered:
temp_layer[key] = '<<triggers database query>>'
except UnicodeEncodeError:
temp_layer[key] = '<<unicode encode error>>'
except Exception:
temp_layer[key] = '<<unhandled exception>>'
else:
temp_layer[key] = value
finally:
recording(True)
try:
context_list.append(pformat(temp_layer))
except UnicodeEncodeError:
pass
kwargs['context'] = [force_text(item) for item in context_list]
kwargs['context_processors'] = getattr(context, 'context_processors', None)
self.templates.append(kwargs)
# Implement the Panel API
nav_title = _("Templates")
@property
def title(self):
num_templates = len(self.templates)
return _("Templates (%(num_templates)s rendered)") % {'num_templates': num_templates}
@property
def nav_subtitle(self):
if self.templates:
return self.templates[0]['template'].name
return ''
template = 'debug_toolbar/panels/templates.html'
@classmethod
def get_urls(cls):
return [
url(r'^template_source/$', views.template_source, name='template_source'),
]
def enable_instrumentation(self):
template_rendered.connect(self._store_template_info)
def disable_instrumentation(self):
template_rendered.disconnect(self._store_template_info)
def generate_stats(self, request, response):
template_context = []
for template_data in self.templates:
info = {}
# Clean up some info about templates
template = template_data.get('template', None)
if hasattr(template, 'origin') and template.origin and template.origin.name:
template.origin_name = template.origin.name
else:
template.origin_name = _('No origin')
info['template'] = template
# Clean up context for better readability
if self.toolbar.config['SHOW_TEMPLATE_CONTEXT']:
context_list = template_data.get('context', [])
info['context'] = '\n'.join(context_list)
template_context.append(info)
# Fetch context_processors from any template
if self.templates:
context_processors = self.templates[0]['context_processors']
else:
context_processors = None
template_dirs = get_template_dirs()
self.record_stats({
'templates': template_context,
'template_dirs': [normpath(x) for x in template_dirs],
'context_processors': context_processors,
})
| seperman/django-debug-toolbar | debug_toolbar/panels/templates/panel.py | Python | bsd-3-clause | 8,872 |
#!/usr/bin/env python
"""
Convert a series of images to a CSS animation. Outputs an image file
containing the frames of the animation and an HTML file with sample CSS
for embedding the animation on a page.
Usage: walt FILENAMES... [options]
Options:
--help Show this screen.
--version Show version.
--verbose Output extra information during run.
--out-image <filename> Filename for composite image. [default: walt.png]
--out-html <filename> Filename for sample HTML. [default: walt.html]
--trim Trim the edges of the images.
--trim-color <color> Color to trim with.
--prefix <prefix> Prefix to use for CSS classes in sample HTML. [default: walt]
--duration <duration> Duration of the animation. Defaults to a value
that ensures 24fps.
"""
import os.path
from docopt import docopt
from PIL import Image, ImageChops
__version__ = '0.1'
with open(os.path.join(os.path.dirname(__file__), 'sample_template.html')) as f:
template = f.read()
def find_trimmed_bbox(image, background_color=None):
"""
Find the bounding box of non-background regions in the image. If no
background color is given, the color at the top left corner is
considered the background color.
"""
background_color = background_color or image.getpixel((0, 0))
background = Image.new(image.mode, image.size, background_color)
diff = ImageChops.difference(image, background)
return diff.getbbox()
def generate_html(filename, frame_width, frame_height, frame_count, image_filename, prefix,
duration=None):
rendered_template = template.format(
prefix=prefix,
frame_width=frame_width,
frame_height=frame_height,
frame_count=frame_count,
duration=duration or '{0:.2f}s'.format(float(frame_count) / 24.0),
final_width=frame_width * frame_count,
)
with open(filename, 'w') as f:
f.write(rendered_template)
def main():
args = docopt(__doc__, version=__version__)
# Utility function to output if verbose.
def out(msg):
if args['--verbose']:
print msg
# Open each image in the sequence.
out('Loading images...')
images = []
for filename in args['FILENAMES']:
images.append(Image.open(filename))
# If requested, trim excess background space from each frame.
if args['--trim']:
out('Trimming edges...')
# Find the bounding box for each image.
bboxes = []
for image in images:
bboxes.append(find_trimmed_bbox(image, args['--trim-color']))
# Find the maximum bounding box that contains all the bounding
# boxes in the sequence.
lefts, tops, rights, bottoms = zip(*bboxes)
left = min(lefts)
top = min(tops)
right = max(rights)
bottom = max(bottoms)
width = right - left
height = bottom - top
bbox = (left, top, right, bottom)
out('Bounding Box: ' + unicode(bbox))
out('Width: ' + unicode(width))
out('Height: ' + unicode(height))
# Crop each image.
cropped_images = []
for image in images:
cropped_images.append(image.crop(bbox))
images = cropped_images
else:
width, height = images[0].size
# Generate a composite image of each image in the sequence.
out('Generating composite image...')
final_width = width * len(images)
out('Composite width: ' + unicode(final_width))
final_image = Image.new('RGBA', (final_width, height))
for k, image in enumerate(images):
final_image.paste(image, (width * k, 0), image)
final_image.save(args['--out-image'])
generate_html(
filename=args['--out-html'],
frame_width=width,
frame_height=height,
frame_count=len(images),
image_filename=args['--out-image'],
prefix=args['--prefix'],
duration=args['--duration'],
)
out('Done!')
if __name__ == '__main__':
main()
| Osmose/walt | walt.py | Python | mit | 4,090 |
from collections import OrderedDict
from flask import (
Blueprint, render_template, request, flash, redirect, url_for, abort
)
from flask.ext import menu
from sqlalchemy.orm import aliased
from sqlalchemy.orm.strategy_options import Load
from sqlalchemy.sql.expression import select
from models import *
from forms import *
def fix_page_number_overflow(paginate, route_name, **kwargs):
if not paginate.items and paginate.total > 0:
new_page = paginate.pages
return redirect(url_for(
route_name, page=new_page, **kwargs
))
# 5 for easier testing purposes
monkeys_per_page = 5
bp_monkey = Blueprint('monkey', __name__)
# '/monkey' route goes first because 'menu' module takes last route's url
@bp_monkey.route('/monkey')
@bp_monkey.route('/')
@menu.register_menu(bp_monkey, '.view_monkey_list', 'Monkeys list', order=0)
def view_monkey_list():
global monkeys_per_page
sort_by = request.args.get('sort_by', 'name', type=str)
sort_asc_str = request.args.get('sort_asc', None, type=str)
page = request.args.get('page', 1, type=int)
fields_order = OrderedDict([
('name', True),
('best_friend.name', True),
('friends_count', False)
])
sort_asc = None if sort_asc_str is None else sort_asc_str == 'True'
if sort_by not in fields_order or sort_asc is None:
sort_by = 'name'
sort_asc = fields_order[sort_by]
fields_order[sort_by] = not sort_asc
best_friend = aliased(Monkey)
# Hack. Can be fixed by denormalization
if sort_by != 'best_friend.name':
monkeys_order_by = getattr(Monkey, sort_by)
else:
monkeys_order_by = getattr(best_friend, 'name')
paginate = Monkey.query.outerjoin(best_friend, Monkey.best_friend).options(
Load(Monkey).load_only(Monkey.name, Monkey.friends_count)
.contains_eager(Monkey.best_friend, alias=best_friend)
.load_only(best_friend.name)
).order_by(
getattr(monkeys_order_by, 'asc' if sort_asc else 'desc')()
).paginate(
page, per_page=monkeys_per_page, error_out=False
)
new_url = fix_page_number_overflow(
paginate, '.view_monkey_list', sort_by=sort_by, sort_asc=sort_asc
)
if new_url:
return new_url
return render_template(
'view_monkey_list.html',
fields_order=fields_order,
sort_by=sort_by,
sort_asc=sort_asc,
paginate=paginate
)
@bp_monkey.route('/monkey/<int:monkey_id>')
def view_monkey(monkey_id):
best_friend = aliased(Monkey)
monkey = Monkey.query.outerjoin(best_friend, Monkey.best_friend).options(
Load(Monkey).load_only(Monkey.name, Monkey.age, Monkey.email)
.contains_eager(Monkey.best_friend, alias=best_friend)
.load_only(best_friend.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
return render_template('view_monkey.html', monkey=monkey)
@menu.register_menu(bp_monkey, '.add_monkey', 'Add monkey', order=1)
@bp_monkey.route('/monkey/add', methods=['GET', 'POST'])
def add_monkey():
monkey_form = MonkeyForm()
if request.method == 'POST':
if monkey_form.validate():
data = dict(monkey_form.data.items())
del data['id']
del data['submit_button']
monkey = Monkey(**data)
db.session.add(monkey)
db.session.commit()
            flash('Monkey was successfully created.')
return redirect(url_for('.view_monkey', monkey_id=monkey.id))
else:
monkey_form.validate_on_submit()
return render_template('add_monkey.html', monkey_form=monkey_form)
@bp_monkey.route('/monkey/<int:monkey_id>/edit', methods=['GET', 'POST'])
def edit_monkey(monkey_id):
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name, Monkey.age, Monkey.email)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
if request.method == 'POST':
monkey_form = MonkeyForm()
if monkey_form.validate():
data = dict(monkey_form.data.items())
del data['id']
del data['submit_button']
monkey = Monkey(**data)
db.session.add(monkey)
db.session.commit()
            flash('Monkey was successfully edited.')
return redirect(url_for('.view_monkey', monkey_id=monkey.id))
else:
monkey_form.validate_on_submit()
else:
monkey_form = MonkeyForm(**monkey.__dict__)
return render_template(
'edit_monkey.html', monkey=monkey, monkey_form=monkey_form
)
@bp_monkey.route('/monkey/<int:monkey_id>/delete', methods=['GET', 'POST'])
def delete_monkey(monkey_id):
if request.method == 'POST':
monkey = Monkey.query.get(monkey_id)
if monkey is None:
abort(404)
db.session.delete(monkey)
db.session.commit()
        flash('Monkey {0} was successfully deleted.'.format(monkey.name))
return redirect(url_for('.view_monkey_list'))
else:
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name, Monkey.age, Monkey.email)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
return render_template('delete_monkey.html', monkey=monkey)
@bp_monkey.route('/friend/<int:monkey_id>')
def view_friend_list(monkey_id):
global monkeys_per_page
page = request.args.get('page', 1, type=int)
best_friend = aliased(Monkey)
monkey = Monkey.query.outerjoin(best_friend, Monkey.best_friend).options(
Load(Monkey).load_only(Monkey.name)
.contains_eager(Monkey.best_friend, alias=best_friend)
.load_only(best_friend.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
paginate = monkey.friends.options(
Load(Monkey).load_only(Monkey.name, Monkey.age, Monkey.email)
).order_by(
Monkey.name.asc()
).paginate(
page, per_page=monkeys_per_page, error_out=False
)
new_url = fix_page_number_overflow(
paginate, '.view_friend_list', monkey_id=monkey_id
)
if new_url:
return new_url
return render_template(
'view_friend_list.html', monkey=monkey, paginate=paginate
)
@bp_monkey.route('/friend/<int:monkey_id>/add')
def view_add_friend(monkey_id):
global monkeys_per_page
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
page = request.args.get('page', 1, type=int)
paginate = Monkey.query.filter(~(Monkey.id.in_(select(
[friends_relationships.c.friend_id],
friends_relationships.c.monkey_id == monkey_id
))), Monkey.id != monkey_id).options(
Load(Monkey).load_only(Monkey.name, Monkey.age, Monkey.email)
).order_by(
Monkey.name.asc()
).paginate(
page, per_page=monkeys_per_page, error_out=False
)
new_url = fix_page_number_overflow(
paginate, '.view_add_friend', monkey_id=monkey_id
)
if new_url:
return new_url
return render_template(
'view_add_friend.html',
monkey=monkey,
paginate=paginate
)
@bp_monkey.route(
'/friend/<int:monkey_id>/add/<int:friend_id>', methods=['POST']
)
def add_friend(monkey_id, friend_id):
page_was = request.args.get('page_was', 1, type=int)
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
friend = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == friend_id).first()
if friend is None:
abort(404)
monkey.add_friend(friend)
db.session.commit()
flash(
'Friend {0} added to monkey {1} friends.'
.format(friend.name, monkey.name)
)
return redirect(url_for(
'.view_add_friend', monkey_id=monkey_id, page=page_was
))
@bp_monkey.route(
'/friend/<int:monkey_id>/delete/<int:friend_id>', methods=['POST']
)
def delete_friend(monkey_id, friend_id):
page_was = request.args.get('page_was', 1, type=int)
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
friend = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == friend_id).first()
if friend is None:
abort(404)
monkey.delete_friend(friend)
db.session.commit()
flash(
'Friend {0} deleted from monkey {1} friends.'
.format(friend.name, monkey.name)
)
return redirect(url_for(
'.view_friend_list', monkey_id=monkey_id, page=page_was
))
@bp_monkey.route(
'/best_friend/<int:monkey_id>/set/<int:friend_id>', methods=['POST']
)
def set_best_friend(monkey_id, friend_id):
page_was = request.args.get('page_was', 1, type=int)
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
friend = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == friend_id).first()
if friend is None:
abort(404)
monkey.set_best_friend(friend)
db.session.commit()
flash(
'Best friend {0} set for monkey {1}.'
.format(friend.name, monkey.name)
)
return redirect(url_for(
'.view_add_friend', monkey_id=monkey_id, page=page_was
))
@bp_monkey.route(
'/best_friend/<int:monkey_id>/unset/<int:friend_id>', methods=['POST']
)
def unset_best_friend(monkey_id, friend_id):
page_was = request.args.get('page_was', 1, type=int)
monkey = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == monkey_id).first()
if monkey is None:
abort(404)
friend = Monkey.query.options(
Load(Monkey).load_only(Monkey.name)
).filter(Monkey.id == friend_id).first()
if friend is None:
abort(404)
monkey.unset_best_friend()
db.session.commit()
flash(
'Best friend {0} unset for monkey {1}.'
.format(friend.name, monkey.name)
)
return redirect(url_for(
'.view_friend_list', monkey_id=monkey_id, page=page_was
))
| qqalexqq/monkeys | views.py | Python | mit | 10,528 |
#! -*- coding: utf-8 -*-
import ftplib
import socket
import os
from miscellaneous.MisExceptions import FtpConnectError
from miscellaneous.MisExceptions import FtpLoginError
from miscellaneous.MisExceptions import FtpDownloadError
from miscellaneous.MisExceptions import FtpUploadError
class BaseFtp(object):
CONNECT_STATUS = {0: 'OK', 1: 'Failed', 2: 'Processing', 3: "Preparing"}
LOGIN_STATUS = {0: 'OK', 1: 'Failed', 2: 'Processing', 3: "Preparing"}
def __init__(self, ftp_host, ftp_user, ftp_password, ftp_port):
self.ftp_host = ftp_host
self.ftp_user = ftp_user
self.ftp_password = ftp_password
self.ftp_port = ftp_port
self.myftp = None
self.connect_status = BaseFtp.CONNECT_STATUS[3]
self.login_status = BaseFtp.LOGIN_STATUS[3]
def connect(self):
self.myftp = ftplib.FTP()
try:
self.myftp.connect(self.ftp_host, self.ftp_port)
except socket.error:
self.connect_status = BaseFtp.CONNECT_STATUS[1]
raise FtpConnectError("Connect {} failed".format(self.ftp_host))
else:
self.connect_status = BaseFtp.CONNECT_STATUS[0]
return self.myftp
def login(self):
if self.connect_status == 'OK':
try:
self.myftp.login(self.ftp_user, self.ftp_password)
except ftplib.error_perm:
self.login_status = BaseFtp.LOGIN_STATUS[1]
raise FtpLoginError("Login ftp server {} Failed".format(self.ftp_host))
else:
self.login_status = BaseFtp.LOGIN_STATUS[0]
return self.myftp
@property
def dir(self):
return self.myftp.dir()
@property
def pwd(self):
return self.myftp.pwd()
def cwd(self, path):
try:
self.myftp.cwd(path)
except ftplib.error_perm as e:
print(e)
else:
print('Change Path {} Successfully'.format(path))
def mkd(self, directory):
try:
self.myftp.mkd(directory)
except ftplib.error_perm as e:
print(e)
else:
print("Create Directory {} Successfully".format(directory))
def rmd(self, directory):
try:
self.myftp.rmd(directory)
except ftplib.error_perm as e:
print(e)
else:
print("Remove Directory {} successfully".format(directory))
def download(self, file):
download_file = open(file, "wb").write
        # Per the ftplib source, the second argument of retrbinary is a callback
        # that is invoked with each chunk of file data received from the socket,
        # so we bind the file object's write method under the name download_file.
try:
self.myftp.retrbinary('RETR %s' %file, download_file)
except:
raise FtpDownloadError('Download {} Failed'.format(file))
else:
print("Download {} Successfully".format(file))
def upload(self, file):
upload_file = open(file, 'rb')
try:
self.myftp.storbinary('STOR %s' % (os.path.basename(file)), upload_file)
except:
raise FtpUploadError("Upload {} Failed".format(upload_file))
else:
print("Upload {} Successfully".format(file))
finally:
upload_file.close()
def delete(self, file):
try:
self.myftp.delete(file)
except ftplib.error_perm as e:
print(e)
else:
print("Delete {} Successfully".format(file))
def upload_file_from_dir(self, dirs):
for root, directory, files in os.walk(dirs):
for file in files:
abs_file = os.path.join(root, file)
self.upload(abs_file)
def upload_ex(self, srcfile, destfile):
srcfile_obj = open(srcfile, 'rb')
try:
self.myftp.storbinary('STOR %s' % (os.path.basename(destfile)), srcfile_obj)
except:
raise FtpUploadError("Upload {} Failed".format(srcfile))
else:
print("Upload {0} Successfully to {1}".format(srcfile, destfile))
finally:
srcfile_obj.close()
def nlst(self):
return self.myftp.nlst()
# TODO: add more function using ftplib builtin function
def __del__(self):
self.myftp.quit()
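# A minimal usage sketch (hedged: host, credentials and file names are
# placeholders, not values used by this project):
#
#   ftp = BaseFtp('ftp.example.com', 'user', 'secret', 21)
#   ftp.connect()
#   ftp.login()
#   ftp.cwd('/incoming')
#   ftp.upload('report.csv')      # stored under its basename in the cwd
#   ftp.download('report.csv')    # fetched into the local working directory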
| fumanne/miscellaneous | miscellaneous/MisFtp.py | Python | gpl-3.0 | 4,377 |
# -*- coding: utf-8 -*-
## begin license ##
#
# "Seecr Test" provides test tools.
#
# Copyright (C) 2012-2015, 2019-2021 Seecr (Seek You Too B.V.) https://seecr.nl
#
# This file is part of "Seecr Test"
#
# "Seecr Test" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Seecr Test" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Seecr Test"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
from seecr.test import SeecrTestCase, CallTrace
from seecr.test.io import stdout_replaced
from seecr.test.timing import T
from seecr.test.utils import ignoreLineNumbers, sleepWheel, parseHtmlAsXml, findTag, includeParentAndDeps, _parseData, mkdir, loadTestsFromPath, createReturnValue, assertHttpOK, postRequest
from lxml.etree import XMLSyntaxError
from time import time
from os import makedirs
from os.path import join, isdir
class UtilsTest(SeecrTestCase):
def testIgnoreLineNumber(self):
theTraceback = """Traceback (most recent call last):
File "../some/file.py", line 104, in aFunction
for _var in vars:
File "some/other/file.py", line 249, in anotherFuntion
raise Exception('xcptn')
Exception: xcptn\n"""
expected = """Traceback (most recent call last):
File [file.py], line [#], in aFunction
for _var in vars:
File [file.py], line [#], in anotherFuntion
raise Exception('xcptn')
Exception: xcptn\n"""
self.assertEqual(expected, ignoreLineNumbers(theTraceback))
def testSleepWheelNoCallback(self):
t0 = time()
with stdout_replaced():
retval = sleepWheel(0.01, interval=0.001)
t1 = time()
delta = t1 - t0
self.assertTrue(0.01 < delta < max(0.02, (0.02 * T_ADJUSTMENT * T)), delta)
self.assertEqual(False, retval)
def testSleepWheelCallbackFalsy(self):
calls = []
def callback():
calls.append(True)
t0 = time()
with stdout_replaced() as out:
retval = sleepWheel(0.01, interval=0.001, callback=callback)
t1 = time()
self.assertEqual('\\\x08|\x08/\x08-\x08\\\x08|\x08/\x08-\x08\\\x08|\x08', out.getvalue())
delta = t1 - t0
self.assertTrue(0.01 < delta < max(0.02, (0.02 * T_ADJUSTMENT * T)), delta)
self.assertEqual(10, len(calls))
self.assertEqual(False, retval)
def testSleepWheelCallbackTruthy(self):
calls = []
def callback():
calls.append(True)
return True
t0 = time()
with stdout_replaced() as out:
retval = sleepWheel(0.01, interval=0.001, callback=callback)
t1 = time()
self.assertEqual('\\\x08', out.getvalue())
delta = t1 - t0
self.assertTrue(0.001 < delta < max(0.002, (0.002 * T_ADJUSTMENT * T)), delta)
self.assertEqual(1, len(calls))
self.assertEqual(True, retval)
def testParseHtmlAsXml(self):
with stdout_replaced():
self.assertRaises(XMLSyntaxError, parseHtmlAsXml, b'<not xml>')
result = parseHtmlAsXml(b'<html><body>‘to the left ←’</body></html>')
self.assertEqual(['‘to the left <-’'], result.xpath('/html/body/text()'))
def testFindTag(self):
self.assertEqual(1, len(list(findTag("input", b"<input></input>"))))
self.assertEqual(1, len(list(findTag("input", b"<input />"))))
self.assertEqual(1, len(list(findTag("input", b"<input/>"))))
self.assertEqual(2, len(list(findTag("input", b"<form><input/><input></input></form>"))))
self.assertEqual(2, len(list(findTag("input", b"<form><input attr='value'/><input></input></form>"))))
self.assertEqual(2, len(list(findTag("input", b"<form><input></input><input/></form>"))))
self.assertEqual(1, len(list(findTag("a", b"<a><img/></a>"))))
self.assertEqual(1, len(list(findTag("a", b"<a>€</a>"))))
self.assertEqual(1, len(list(findTag("a", b"<html><a/><a class='test'>text</a></html>", **{"class": "test"}))))
self.assertEqual(1, len(list(findTag("a", b"<html><a a='1' b='2'/><a a='1'/></html>", **dict(a=1, b=2)))))
def testParseData(self):
data = b"HTTP/1.1 200 Ok\r\nContent-Type: whatever\r\nother-header: value\r\n\r\ndata"
statusAndHeaders, body = _parseData(data)
self.assertEqual('200', statusAndHeaders["StatusCode"])
self.assertEqual({'Content-Type': 'whatever', 'Other-Header': 'value'}, statusAndHeaders["Headers"])
self.assertEqual(b'data', body)
def testParseDataEmptyBody(self):
data = b'HTTP/1.0 503 Service Temporarily Unavailable\r\n\r\n'
statusAndHeaders, body = _parseData(data)
self.assertEqual('503', statusAndHeaders["StatusCode"])
self.assertEqual({}, statusAndHeaders["Headers"])
self.assertEqual(b'', body)
def testCreateReturnValue(self):
data = b"HTTP/1.1 200 Ok\r\nContent-Type: whatever\r\nother-header: value\r\n\r\ndata"
statusAndHeaders, body = createReturnValue(data, parse=True)
self.assertEqual('200', statusAndHeaders["StatusCode"])
self.assertEqual({'Content-Type': 'whatever', 'Other-Header': 'value'}, statusAndHeaders["Headers"])
self.assertEqual(b'data', body)
data = b"HTTP/1.1 200 Ok\r\nContent-Type: application/json\r\nother-header: value\r\n\r\n{\"key\": 42}"
statusAndHeaders, body = createReturnValue(data, parse=True)
self.assertEqual(dict(key=42), body)
data = b"HTTP/1.1 200 Ok\r\nother-header: value\r\n\r\n<aap>noot</aap>"
statusAndHeaders, body = createReturnValue(data, parse=True)
self.assertEqual(['noot'], body.xpath('/aap/text()'))
statusAndHeaders, body = createReturnValue(data, parse=False)
self.assertEqual(b'<aap>noot</aap>', body)
# Make a list if header appears more than once
data = b"HTTP/1.1 200 Ok\r\nother-header: whatever\r\nother-header: value\r\n\r\ndata"
statusAndHeaders, body = createReturnValue(data, parse=True)
self.assertEqual({'Other-Header': ['whatever', 'value']}, statusAndHeaders["Headers"])
# Set-Cookie is always a list
data = b"HTTP/1.1 200 Ok\r\nSet-Cookie: whatever\r\n\r\ndata"
statusAndHeaders, body = createReturnValue(data, parse=True)
self.assertEqual({'Set-Cookie': ['whatever']}, statusAndHeaders["Headers"])
def testMkdir(self):
self.assertFalse(isdir(join(self.tempdir, "mkdir")))
self.assertEqual(join(self.tempdir, "mkdir"), mkdir(self.tempdir, "mkdir"))
self.assertTrue(isdir(join(self.tempdir, "mkdir")))
self.assertFalse(isdir(join(self.tempdir, "1", "2", "3", "4")))
mkdir(self.tempdir, "1", "2", "3", "4")
self.assertTrue(isdir(join(self.tempdir, "1", "2", "3", "4")))
def testLoadTestFromPath(self):
g = {}
loadTestsFromPath(self.tempdir, _globals=g)
self.assertEqual({}, g)
with open(join(self.tempdir, "sometest.py"), "w") as fp:
fp.write(TEST_TEMPLATE)
loadTestsFromPath(self.tempdir, _globals=g)
self.assertTrue('SomeTest' in g, g)
def testLoadTestFromPathSubDirs(self):
with open(join(self.tempdir, "sometest.py"), "w") as fp:
fp.write(TEST_TEMPLATE)
with open(join(mkdir(self.tempdir, "sub"), "sometest.py"), "w") as fp:
fp.write(TEST_TEMPLATE)
g = {}
loadTestsFromPath(self.tempdir, _globals=g)
self.assertEqual(2, len(g))
self.assertEqual({'sub.SomeTest', 'SomeTest'}, set(g.keys()))
def testAssertHttpOK(self):
headers = {'StatusCode': '302', 'Headers': {'Location': '/form'}}
assertHttpOK(headers, '', expectedStatus=302)
headers = {'StatusCode': '302', 'Headers': {'Location': '/form'}}
assertHttpOK(headers, '', expectedStatus="302")
try:
headers = {'StatusCode': '200', 'Headers': {'Location': '/form'}}
assertHttpOK(headers, '', expectedStatus=302)
except AssertionError as e:
self.assertEqual('HTTP Status code; expected 302, got 200', str(e))
try:
body = 'blah blah Traceback blah blah'
assertHttpOK({'StatusCode': '302'}, body, expectedStatus=302)
except AssertionError as e:
self.assertEqual('Traceback found in body:\n{}'.format(body), str(e))
try:
body = b'blah blah Traceback blah blah'
assertHttpOK({'StatusCode': '302'}, body, expectedStatus=302)
except AssertionError as e:
self.assertEqual('Traceback found in body', str(e))
def testPostRequestWithCookie(self):
mockSocket = self.createMockSocket([b'HTTP/1.1 200 Ok\r\nMy-Header: this\r\n\r\ndata'])
headers, result = postRequest(12345,
'/some/path',
data=b'lekker, lekker',
cookie='gevulde-koek',
timeOutInSeconds=200,
_createSocket=mockSocket.createSocket,
parse=False)
self.assertEqual(b'data', result)
self.assertEqual({"StatusCode":'200', 'Headers':{"My-Header": 'this'}}, headers)
self.assertEqual(['createSocket', 'send', 'recv', 'recv', 'close'], mockSocket.calledMethodNames())
create, send = mockSocket.calledMethods[:2]
self.assertEqual((12345, 200), create.args)
self.assertEqual('''POST /some/path HTTP/1.0
Content-Length: 14
Content-Type: text/xml; charset="utf-8"
Cookie: gevulde-koek

lekker, lekker''', send.args[0].decode().replace('\r\n','\n'))
def createMockSocket(self, responses):
def recv(*args):
return responses.pop() if len(responses) else None
mockSocket = CallTrace(returnValues={'close':None}, methods=dict(send=lambda data: len(data), recv=recv))
mockSocket.returnValues['createSocket'] = mockSocket
mockSocket.methods['send'] = lambda data: len(data)
return mockSocket
TEST_TEMPLATE = """from seecr.test import SeecrTestCase
class SomeTest(SeecrTestCase):
def testOne(self):
pass"""
T_ADJUSTMENT = 1.5
| seecr/seecr-test | test/utilstest.py | Python | gpl-2.0 | 10,633 |
# -*- coding: utf-8 -*-
"""Electromagnetic radiation
"""
| woutdenolf/spectrocrunch | spectrocrunch/sources/__init__.py | Python | mit | 58 |
#*****************************************************************************
# fseed.py
#
# SEED builder for SeisComP
#
# (c) 2005 Andres Heinloo, GFZ Potsdam
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any later
# version. For more information, see http://www.gnu.org/
#*****************************************************************************
import re
import datetime
import mseedlite as mseed
from tempfile import TemporaryFile
from shutil import copyfileobj
from seiscomp import logs
_RECLEN_EXP = 12
def _min_data_gap(fsamp):
return datetime.timedelta(microseconds=1000000/(fsamp * 10))
_min_ts_gap = datetime.timedelta(minutes=1)
class SEEDError(Exception):
pass
_rx_coeff = re.compile(r'\s*(\S+)\s*')
def _mkseedcoeff(nblk, nfld, ncoeff, s):
pos = 0
n = 0
c = ""
while pos < len(s):
m = _rx_coeff.match(s, pos)
if m == None:
raise SEEDError, "blockette %d, field %d: error parsing FIR coefficients at '%s'" % (nblk, nfld, s[pos:])
try:
v = float(m.group(1))
except ValueError:
raise SEEDError, "blockette %d, field %d: error parsing FIR coefficients at '%s'" % (nblk, nfld, s[pos:])
c += "%14.7E" % (v,)
n += 1
pos = m.end()
if n != ncoeff:
raise SEEDError, "blockette %d, field %d: expected %d coefficients, found %d" % (nblk, nfld, ncoeff, n)
return c
def _mkseedcoeff2(nblk, nfld, ncoeff, s):
pos = 0
n = 0
c = ""
while pos < len(s):
m = _rx_coeff.match(s, pos)
if m == None:
raise SEEDError, "blockette %d, field %d: error parsing polynomial coefficients at '%s'" % (nblk, nfld, s[pos:])
try:
v = float(m.group(1))
except ValueError:
raise SEEDError, "blockette %d, field %d: error parsing polynomial coefficients at '%s'" % (nblk, nfld, s[pos:])
c += "%12.5E%12.5E" % (v,0)
n += 1
pos = m.end()
if n != ncoeff:
raise SEEDError, "blockette %d, field %d: expected %d coefficients, found %d" % (nblk, nfld, ncoeff, n)
return c
_rx_paz = re.compile(r'\s*([0-9]*)\(\s*([^,]+),\s*([^)]+)\)\s*')
def _mkseedpaz(nblk, nfld, npaz, s):
pos = 0
n = 0
c = ""
while pos < len(s):
m = _rx_paz.match(s, pos)
if m == None:
raise SEEDError, "blockette %d, field %d: error parsing PAZ at '%s'" % (nblk, nfld, s[pos:])
try:
if len(m.group(1)) > 0:
x = int(m.group(1))
else:
x = 1
rv = float(m.group(2))
iv = float(m.group(3))
except ValueError:
raise SEEDError, "blockette %d, field %d: error parsing PAZ at '%s'" % (nblk, nfld, s[pos:])
for i in xrange(0, x):
c += "%12.5E%12.5E 0.00000E-00 0.00000E-00" % (rv, iv)
n += x
pos = m.end()
if n != npaz:
raise SEEDError, "blockette %d, field %d: expected %d PAZ, found %d" % (nblk, nfld, npaz, n)
return c
def _mkseedstring(nblk, nfld, s, min_length, max_length, flags):
U = L = N = P = S = X = False
rx_list = []
if flags.find("U") != -1:
U = True
rx_list.append("[A-Z]")
if flags.find("L") != -1:
L = True
rx_list.append("[a-z]")
if flags.find("N") != -1:
N = True
rx_list.append("[0-9]")
if flags.find("P") != -1:
P = True
rx_list.append("[^A-Za-z0-9 ]")
if flags.find("S") != -1:
S = True
rx_list.append(" ")
if flags.find("_") != -1:
X = True
rx_list.append("_")
sn = s.strip()[:max_length]
if U and not L:
sn = sn.upper()
elif L and not U:
sn = sn.lower()
if S and not X:
sn = sn.replace("_", " ")
elif X and not S:
sn = sn.replace(" ", "_")
rx = "|".join(rx_list)
sn = "".join(re.findall(rx, sn))
if re.match("(" + rx + ")*$", sn) == None:
raise SEEDError, "blockette %d, field %d: cannot convert string \"%s\" with flags %s" % \
(nblk, nfld, s, flags)
if len(sn) < min_length:
if min_length != max_length:
raise SEEDError, "blockette %d, field %d: cannot extend string \"%s\" to minimum length %d with flags %s" % \
(nblk, nfld, s, min_length, flags)
else:
sn = (sn + min_length * " ")[:min_length]
if min_length != max_length:
sn += "~"
return sn
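# For example (hedged, traced by hand through the rules above): with flags "UN"
# and min_length == max_length == 5, the input "ap u1" becomes "APU1 " -- the
# letters are upper-cased, the space is dropped because "S" is not among the
# flags, and the result is padded to the fixed length without a trailing "~".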
def _mkseedtime(nblk, nfld, t):
if t == None:
return "~"
if isinstance(t, datetime.datetime):
tt = t.utctimetuple()
return "%04d,%03d,%02d:%02d:%02d.%04d~" % (t.year, tt[7],
t.hour, t.minute, t.second, t.microsecond // 100)
elif isinstance(t, datetime.date):
tt = datetime.datetime.combine(t, datetime.time(0, 0, 0)).utctimetuple()
return "%04d,%03d~" % (t.year, tt[7])
raise SEEDError, "blockette %d, field %d: invalid time object: %s" % (nblk, nfld, str(t))
def _cmptime(t1, t2):
if t1 is None and t2 is None:
return 0
elif t2 is None or (t1 is not None and t1 < t2):
return -1
elif t1 is None or (t2 is not None and t1 > t2):
return 1
return 0
def _is_fir_response(obj):
return hasattr(obj, "symmetry")
def _is_paz_response(obj):
return hasattr(obj, "poles")
def _is_poly_response(obj):
return hasattr(obj, "approximationType")
class _Blockette10(object):
def __init__(self, record_length, start_time, end_time, vol_time,
organization, label):
self.__record_length = record_length
self.__start_time = _mkseedtime(10, 5, start_time)
self.__end_time = _mkseedtime(10, 6, end_time)
self.__vol_time = _mkseedtime(10, 7, vol_time)
self.__organization = _mkseedstring(10, 8, organization, 1, 80, "UNLPS_")
self.__label = _mkseedstring(10, 9, label, 1, 80, "UNLPS_")
self.__len = 13 + len(self.__start_time) + len(self.__end_time) + \
len(self.__vol_time) + len(self.__organization) + \
len(self.__label)
def output(self, f):
blk = "010%4d 2.3%2d%s%s%s%s%s" % (self.__len, self.__record_length,
self.__start_time, self.__end_time, self.__vol_time,
self.__organization, self.__label)
if len(blk) != self.__len:
raise SEEDError, "blockette 10 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette11(object):
def __init__(self):
self.__nstations = 0
self.__stat_rec = ()
self.__len = 10
def add_station(self, code, recno):
self.__stat_rec += (_mkseedstring(11, 4, code, 5, 5, "UN"), recno)
self.__nstations += 1
self.__len += 11
def __output_huge(self, f):
n = 0
while n < self.__nstations:
ns = min(self.__nstations - n, 908)
blen = 10 + 11 * ns
blk = ("011%4d%3d" + ns * "%s%6d") % \
((blen, ns) + self.__stat_rec[2*n:2*(n+ns)])
if len(blk) != blen:
raise SEEDError, "blockette 11 has invalid length: %d instead of %d" % (len(blk), blen)
f.write_blk(blk)
n += ns
def output(self, f):
if self.__len > 9999:
self.__output_huge(f)
return
blk = ("011%4d%3d" + self.__nstations * "%s%6d") % \
((self.__len, self.__nstations) + self.__stat_rec)
if len(blk) != self.__len:
raise SEEDError, "blockette 11 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette12(object):
def __init__(self):
self.__nspans = 0
self.__span_rec = ()
self.__len = 11
def add_span(self, begin, end, recno):
self.__span_rec += (_mkseedtime(12, 4, begin), _mkseedtime(12, 5, end), recno)
self.__nspans += 1
self.__len += 52
def __output_huge(self, f):
n = 0
while n < self.__nspans:
ns = min(self.__nspans - n, 192)
blen = 11 + 52 * ns
blk = ("012%4d%4d" + ns * "%s%s%6d") % \
((blen, ns) + self.__span_rec[3*n:3*(n+ns)])
if len(blk) != blen:
raise SEEDError, "blockette 12 has invalid length: %d instead of %d" % (len(blk), blen)
f.write_blk(blk)
n += ns
def output(self, f):
if self.__len > 9999:
self.__output_huge(f)
return
blk = ("012%4d%4d" + self.__nspans * "%s%s%6d") % \
((self.__len, self.__nspans) + self.__span_rec)
if len(blk) != self.__len:
raise SEEDError, "blockette 12 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette30(object):
def __init__(self, name, key, family, ddl):
self.__name = _mkseedstring(30, 3, name, 1, 50, "UNLPS")
self.__key = key
self.__family = family
self.__ddl = "~".join(ddl) + "~"
self.__nddl = len(ddl)
self.__len = 16 + len(self.__name) + len(self.__ddl)
def output(self, f):
blk = "030%4d%s%4d%3d%2d%s" % (self.__len, self.__name,
self.__key, self.__family, self.__nddl, self.__ddl)
if len(blk) != self.__len:
raise SEEDError, "blockette 30 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette31(object):
def __init__(self, key, comment):
self.__key = key
self.__comment = _mkseedstring(31, 5, comment, 1, 70, "UNLPS")
self.__len = 15 + len(self.__comment)
def output(self, f):
blk = "031%4d%4dX%s 0" % (self.__len, self.__key, self.__comment)
class _Blockette33(object):
def __init__(self, key, desc):
self.__key = key
self.__desc = _mkseedstring(33, 4, desc, 1, 50, "UNLPS")
self.__len = 10 + len(self.__desc)
def output(self, f):
blk = "033%4d%3d%s" % (self.__len, self.__key, self.__desc)
if len(blk) != self.__len:
raise SEEDError, "blockette 33 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette34(object):
def __init__(self, key, name, desc):
self.__key = key
self.__name = _mkseedstring(34, 4, name, 1, 20, "UNP")
self.__desc = _mkseedstring(34, 5, desc, 1, 50, "UNLPS")
self.__len = 10 + len(self.__name) + len(self.__desc)
def output(self, f):
blk = "034%4d%3d%s%s" % (self.__len, self.__key, self.__name,
self.__desc)
if len(blk) != self.__len:
raise SEEDError, "blockette 34 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette41(object):
def __init__(self, key, name, symmetry, input_units, output_units, ncoeff,
coeff):
self.__key = key
self.__name = _mkseedstring(41, 4, name, 1, 25, "UN")
self.__symmetry = _mkseedstring(41, 5, symmetry, 1, 1, "U")
self.__input_units = input_units
self.__output_units = output_units
self.__ncoeff = ncoeff
self.__coeff = _mkseedcoeff(41, 9, ncoeff, coeff)
self.__len = 22 + 14 * ncoeff + len(self.__name)
def __output_huge(self, f):
n = 0
while n < self.__ncoeff:
nc = min(self.__ncoeff - n, (9977 - len(self.__name)) // 14)
blen = 22 + 14 * nc + len(self.__name)
blk = "041%4d%4d%s%s%3d%3d%4d%s" % (blen, self.__key,
self.__name, self.__symmetry, self.__input_units,
self.__output_units, self.__ncoeff, self.__coeff[14*n:14*(n+nc)])
if len(blk) != blen:
raise SEEDError, "blockette 41 has invalid length: %d instead of %d" % (len(blk), blen)
f.write_blk(blk)
n += nc
def output(self, f):
if self.__len > 9999:
self.__output_huge(f)
return
blk = "041%4d%4d%s%s%3d%3d%4d%s" % (self.__len, self.__key,
self.__name, self.__symmetry, self.__input_units,
self.__output_units, self.__ncoeff, self.__coeff)
if len(blk) != self.__len:
raise SEEDError, "blockette 41 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette42(object):
def __init__(self, key, name, input_units, output_units, freq_unit, low_freq,
high_freq, approx_type, approx_lower_bound, approx_upper_bound,
approx_error, ncoeff, coeff):
self.__key = key
self.__name = _mkseedstring(42, 4, name, 1, 25, "UN")
self.__input_units = input_units
self.__output_units = output_units
self.__freq_unit = freq_unit
self.__low_freq = low_freq
self.__high_freq = high_freq
self.__approx_type = approx_type
self.__approx_lower_bound = approx_lower_bound
self.__approx_upper_bound = approx_upper_bound
self.__approx_error = approx_error
self.__ncoeff = ncoeff
self.__coeff = _mkseedcoeff2(42, 16, ncoeff, coeff)
self.__len = 83 + 24 * ncoeff + len(self.__name)
def output(self, f):
blk = "042%4d%4d%sP%3d%3d%1s%1s%12.5E%12.5E%12.5E%12.5E%12.5E%3d%s" % (self.__len,
self.__key, self.__name, self.__input_units, self.__output_units,
self.__approx_type, self.__freq_unit, self.__low_freq, self.__high_freq,
self.__approx_lower_bound, self.__approx_upper_bound, self.__approx_error,
self.__ncoeff, self.__coeff)
if len(blk) != self.__len:
raise SEEDError, "blockette 42 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette43(object):
def __init__(self, key, name, type, input_units, output_units, norm_fac,
norm_freq, nzeros, zeros, npoles, poles):
self.__key = key
self.__name = _mkseedstring(43, 4, name, 1, 25, "UN")
self.__type = _mkseedstring(43, 5, type, 1, 1, "U")
self.__input_units = input_units
self.__output_units = output_units
self.__norm_fac = norm_fac
self.__norm_freq = norm_freq
self.__nzeros = nzeros
self.__zeros = _mkseedpaz(43, 11, nzeros, zeros)
self.__npoles = npoles
self.__poles = _mkseedpaz(43, 16, npoles, poles)
self.__len = 48 + 48 * (nzeros + npoles) + len(self.__name)
def output(self, f):
blk = "043%4d%4d%s%s%3d%3d%12.5E%12.5E%3d%s%3d%s" % \
(self.__len, self.__key, self.__name, self.__type,
self.__input_units, self.__output_units, self.__norm_fac,
self.__norm_freq, self.__nzeros, self.__zeros, self.__npoles,
self.__poles)
if len(blk) != self.__len:
raise SEEDError, "blockette 43 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette44(object):
def __init__(self, key, name, type, input_units, output_units):
self.__key = key
self.__name = _mkseedstring(44, 4, name, 1, 25, "UN")
self.__type = _mkseedstring(44, 5, type, 1, 1, "U")
self.__input_units = input_units
self.__output_units = output_units
self.__len = 26 + len(self.__name)
def output(self, f):
blk = "044%4d%4d%s%s%3d%3d 0 0" % (self.__len, self.__key,
self.__name, self.__type, self.__input_units,
self.__output_units)
if len(blk) != self.__len:
raise SEEDError, "blockette 44 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette47(object):
def __init__(self, key, name, input_rate, deci_fac, deci_offset,
delay, correction):
self.__key = key
self.__name = _mkseedstring(47, 4, name, 1, 25, "UN")
self.__input_rate = input_rate
self.__deci_fac = deci_fac
self.__deci_offset = deci_offset
self.__delay = delay
self.__correction = correction
self.__len = 53 + len(self.__name)
def output(self, f):
blk = "047%4d%4d%s%10.4E%5d%5d%11.4E%11.4E" % (self.__len,
self.__key, self.__name, self.__input_rate, self.__deci_fac,
self.__deci_offset, self.__delay, self.__correction)
if len(blk) != self.__len:
raise SEEDError, "blockette 47 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette48(object):
def __init__(self, key, name, gain, gain_freq):
self.__key = key
self.__name = _mkseedstring(48, 4, name, 1, 25, "UN")
self.__gain = gain
self.__gain_freq = gain_freq
self.__len = 37 + len(self.__name)
def output(self, f):
blk = "048%4d%4d%s%12.5E%12.5E 0" % (self.__len, self.__key,
self.__name, self.__gain, self.__gain_freq)
if len(blk) != self.__len:
raise SEEDError, "blockette 48 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette50(object):
def __init__(self, stat_code, latitude, longitude, elevation,
site_name, net_id, net_code, start_date, end_date):
self.__stat_code = _mkseedstring(50, 3, stat_code, 5, 5, "UN")
self.__latitude = latitude
self.__longitude = longitude
self.__elevation = elevation
self.__site_name = _mkseedstring(50, 9, site_name, 1, 60, "UNLPS")
self.__net_id = net_id
self.__start_date = _mkseedtime(50, 13, start_date)
self.__end_date = _mkseedtime(50, 14, end_date)
self.__net_code = _mkseedstring(50, 16, net_code, 2, 2, "UN")
self.__len = 59 + len(self.__site_name) + len(self.__start_date) + \
len(self.__end_date)
def output(self, f):
blk = "050%4d%s%10.6f%11.6f%7.1f %s%3d321010%s%sN%s" % \
(self.__len, self.__stat_code, self.__latitude, self.__longitude,
self.__elevation, self.__site_name, self.__net_id,
self.__start_date, self.__end_date, self.__net_code)
if len(blk) != self.__len:
raise SEEDError, "blockette 50 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette51(object):
def __init__(self, start_time, end_time, comment_key):
self.__start_time = _mkseedtime(51, 3, start_time)
self.__end_time = _mkseedtime(51, 4, end_time)
self.__comment_key = comment_key
self.__len = 17 + len(self.__start_time) + len(self.__end_time)
def output(self, f):
blk = "051%4d%s%s%4d 0" % (self.__len, self.__start_time,
self.__end_time, self.__comment_key)
if len(blk) != self.__len:
raise SEEDError, "blockette 51 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette52(object):
def __init__(self, loc_id, chan_id, instr_id, comment, signal_units,
calibration_units, latitude, longitude, elevation, local_depth,
azimuth, dip, data_format, record_length, sample_rate, clock_drift,
flags, start_date, end_date):
self.__loc_id = _mkseedstring(52, 3, loc_id, 2, 2, "UN")
self.__chan_id = _mkseedstring(52, 4, chan_id, 3, 3, "UN")
self.__instr_id = instr_id
self.__comment = _mkseedstring(52, 7, comment, 0, 30, "UNLPS")
self.__signal_units = signal_units
self.__calibration_units = calibration_units
self.__latitude = latitude
self.__longitude = longitude
self.__elevation = elevation
self.__local_depth = local_depth
self.__azimuth = azimuth
self.__dip = dip
self.__data_format = data_format
self.__record_length = record_length
self.__sample_rate = sample_rate
self.__clock_drift = clock_drift
self.__flags = _mkseedstring(52, 21, flags, 0, 26, "U")
self.__raw_start_date = start_date
self.__raw_end_date = end_date
self.__len = 0
def set_vol_span(self, vol_start, vol_end):
# make verseed happy
if _cmptime(self.__raw_end_date, vol_end) > 0:
self.__raw_end_date = vol_end
if _cmptime(self.__raw_start_date, vol_start) < 0:
self.__raw_start_date = vol_start
def output(self, f):
self.__start_date = _mkseedtime(52, 22, self.__raw_start_date)
self.__end_date = _mkseedtime(52, 23, self.__raw_end_date)
self.__len = 99 + len(self.__comment) + len(self.__flags) + \
len(self.__start_date) + len(self.__end_date)
blk = "052%4d%s%s 0%3d%s%3d%3d%10.6f%11.6f%7.1f%5.1f%5.1f%5.1f%4d%2d%10.4E%10.4E %s%s%sN" % \
(self.__len, self.__loc_id, self.__chan_id, self.__instr_id,
self.__comment, self.__signal_units, self.__calibration_units,
self.__latitude, self.__longitude, self.__elevation,
self.__local_depth, self.__azimuth, self.__dip, self.__data_format,
self.__record_length, self.__sample_rate, self.__clock_drift,
self.__flags, self.__start_date, self.__end_date)
if len(blk) != self.__len:
raise SEEDError, "blockette 52 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette53(object):
def __init__(self, type, input_units, output_units, norm_fac,
norm_freq, nzeros, zeros, npoles, poles):
self.__type = _mkseedstring(53, 3, type, 1, 1, "U")
self.__stage = 0
self.__input_units = input_units
self.__output_units = output_units
self.__norm_fac = norm_fac
self.__norm_freq = norm_freq
self.__nzeros = nzeros
self.__zeros = _mkseedpaz(53, 10, nzeros, zeros)
self.__npoles = npoles
self.__poles = _mkseedpaz(53, 15, npoles, poles)
self.__len = 46 + 48 * (nzeros + npoles)
def set_stage(self, stage):
self.__stage = stage
def output(self, f):
blk = "053%4d%s%2d%3d%3d%12.5E%12.5E%3d%s%3d%s" % \
(self.__len, self.__type, self.__stage,
self.__input_units, self.__output_units, self.__norm_fac,
self.__norm_freq, self.__nzeros, self.__zeros, self.__npoles,
self.__poles)
if len(blk) != self.__len:
raise SEEDError, "blockette 53 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette54(object):
def __init__(self, type, input_units, output_units):
self.__type = _mkseedstring(54, 3, type, 1, 1, "U")
self.__stage = 0
self.__input_units = input_units
self.__output_units = output_units
self.__len = 24
def set_stage(self, stage):
self.__stage = stage
def output(self, f):
blk = "054%4d%s%2d%3d%3d 0 0" % (self.__len,
self.__type, self.__stage, self.__input_units,
self.__output_units)
if len(blk) != self.__len:
raise SEEDError, "blockette 54 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette57(object):
def __init__(self, input_rate, deci_fac, deci_offset, delay, correction):
self.__stage = 0
self.__input_rate = input_rate
self.__deci_fac = deci_fac
self.__deci_offset = deci_offset
self.__delay = delay
self.__correction = correction
self.__len = 51
def set_stage(self, stage):
self.__stage = stage
def output(self, f):
blk = "057%4d%2d%10.4E%5d%5d%11.4E%11.4E" % (self.__len,
self.__stage, self.__input_rate, self.__deci_fac,
self.__deci_offset, self.__delay, self.__correction)
if len(blk) != self.__len:
raise SEEDError, "blockette 57 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette58(object):
def __init__(self, gain, gain_freq):
self.__stage = 0
self.__gain = gain
self.__gain_freq = gain_freq
self.__len = 35
def set_stage(self, stage):
self.__stage = stage
def output(self, f):
blk = "058%4d%2d%12.5E%12.5E 0" % (self.__len, self.__stage,
self.__gain, self.__gain_freq)
if len(blk) != self.__len:
raise SEEDError, "blockette 58 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette60(object):
def __init__(self, start_stage):
self.__start_stage = start_stage
self.__ref_list = []
self.__len = 9
def add_stage(self, *keyref):
self.__ref_list.append(keyref)
self.__len += 4 + 4 * len(keyref)
def output(self, f):
blk = "060%4d%2d" % (self.__len, len(self.__ref_list))
for (n, r) in enumerate(self.__ref_list):
blk += ("%2d%2d" + len(r) * "%4d") % \
((self.__start_stage + n, len(r)) + r)
if len(blk) != self.__len:
raise SEEDError, "blockette 60 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette61(object):
def __init__(self, name, symmetry, input_units, output_units, ncoeff,
coeff):
self.__stage = 0
self.__name = _mkseedstring(61, 4, name, 1, 25, "UN")
self.__symmetry = _mkseedstring(61, 5, symmetry, 1, 1, "U")
self.__input_units = input_units
self.__output_units = output_units
self.__ncoeff = ncoeff
self.__coeff = _mkseedcoeff(61, 9, ncoeff, coeff)
self.__len = 20 + 14 * ncoeff + len(self.__name)
def set_stage(self, stage):
self.__stage = stage
def __output_huge(self, f):
n = 0
while n < self.__ncoeff:
nc = min(self.__ncoeff - n, (9977 - len(self.__name)) // 14)
blen = 20 + 14 * nc + len(self.__name)
blk = "061%4d%2d%s%s%3d%3d%4d%s" % (blen, self.__stage,
self.__name, self.__symmetry, self.__input_units,
self.__output_units, self.__ncoeff, self.__coeff[14*n:14*(n+nc)])
if len(blk) != blen:
raise SEEDError, "blockette 61 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
n += nc
def output(self, f):
if self.__len > 9999:
self.__output_huge(f)
return
blk = "061%4d%2d%s%s%3d%3d%4d%s" % (self.__len, self.__stage,
self.__name, self.__symmetry, self.__input_units,
self.__output_units, self.__ncoeff, self.__coeff)
if len(blk) != self.__len:
raise SEEDError, "blockette 61 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette62(object):
def __init__(self, input_units, output_units, freq_unit, low_freq,
high_freq, approx_type, approx_lower_bound, approx_upper_bound,
approx_error, ncoeff, coeff):
self.__stage = 0
self.__input_units = input_units
self.__output_units = output_units
self.__freq_unit = freq_unit
self.__low_freq = low_freq
self.__high_freq = high_freq
self.__approx_type = approx_type
self.__approx_lower_bound = approx_lower_bound
self.__approx_upper_bound = approx_upper_bound
self.__approx_error = approx_error
self.__ncoeff = ncoeff
self.__coeff = _mkseedcoeff2(62, 15, ncoeff, coeff)
self.__len = 81 + 24 * ncoeff
def set_stage(self, stage):
self.__stage = stage
def output(self, f):
blk = "062%4d%2dP%3d%3d%1s%1s%12.5E%12.5E%12.5E%12.5E%12.5E%3d%s" % (self.__len,
self.__stage, self.__input_units, self.__output_units,
self.__approx_type, self.__freq_unit, self.__low_freq, self.__high_freq,
self.__approx_lower_bound, self.__approx_upper_bound, self.__approx_error,
self.__ncoeff, self.__coeff)
if len(blk) != self.__len:
raise SEEDError, "blockette 42 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette70(object):
def __init__(self, flag, begin, end):
self.__flag = flag
self.__begin = _mkseedtime(70, 4, begin)
self.__end = _mkseedtime(70, 5, end)
self.__len = 54
def output(self, f):
blk = "070%4d%c%s%s" % (self.__len, self.__flag, self.__begin,
self.__end)
if len(blk) != self.__len:
raise SEEDError, "blockette 70 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
class _Blockette74(object):
def __init__(self, net_code, stat_code, loc_id, chan_id,
start_time, start_recno, end_time, end_recno):
self.__net_code = _mkseedstring(74, 16, net_code, 2, 2, "UN")
self.__stat_code = _mkseedstring(74, 3, stat_code, 5, 5, "UN")
self.__loc_id = _mkseedstring(74, 4, loc_id, 2, 2, "UN")
self.__chan_id = _mkseedstring(74, 5, chan_id, 3, 3, "UN")
self.__start_time = _mkseedtime(74, 6, start_time)
self.__start_recno = start_recno
self.__end_time = _mkseedtime(74, 9, end_time)
self.__end_recno = end_recno
self.__len = 84
def add_accelerator(self, recno):
pass
def output(self, f):
blk = "074%4d%s%s%s%s%6d 1%s%6d 1 0%s" % (self.__len,
self.__stat_code, self.__loc_id, self.__chan_id,
self.__start_time, self.__start_recno, self.__end_time,
self.__end_recno, self.__net_code)
if len(blk) != self.__len:
raise SEEDError, "blockette 70 has invalid length: %d instead of %d" % (len(blk), self.__len)
f.write_blk(blk)
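# Dictionary/abbreviation helpers: each lookup() returns a numeric key for a
# name, creating the matching abbreviation blockette on first use (30 for data
# formats, 34 for units, 31 for comments, 33 for generic descriptions), and
# output() writes the collected blockettes.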
class _FormatDict(object):
__formats = {
"Steim1": ("Steim1 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W7 D C2",
"T2 Y2 W2 D C2",
"T3 N0 W4 D C2"),
"steim1": ("Steim1 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W7 D C2",
"T2 Y2 W2 D C2",
"T3 N0 W4 D C2"),
"Steim2": ("Steim2 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W1 D C2",
"T2 W4 I D2",
"K0 X D30",
"K1 N0 D30 C2",
"K2 Y2 D15 C2",
"K3 Y3 D10 C2",
"T3 W4 I D2",
"K0 Y5 D6 C2",
"K1 Y6 D5 C2",
"K2 X D2 Y7 D4 C2",
"K3 X D30"),
"steim2": ("Steim2 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W1 D C2",
"T2 W4 I D2",
"K0 X D30",
"K1 N0 D30 C2",
"K2 Y2 D15 C2",
"K3 Y3 D10 C2",
"T3 W4 I D2",
"K0 Y5 D6 C2",
"K1 Y6 D5 C2",
"K2 X D2 Y7 D4 C2",
"K3 X D30"),
"mseed10": ("Steim1 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W7 D C2",
"T2 Y2 W2 D C2",
"T3 N0 W4 D C2"),
"mseed11": ("Steim2 Integer Compression Format", 50,
"F1 P4 W4 D C2 R1 P8 W4 D C2",
"P0 W4 N15 S2,0,1",
"T0 X W4",
"T1 Y4 W1 D C2",
"T2 W4 I D2",
"K0 X D30",
"K1 N0 D30 C2",
"K2 Y2 D15 C2",
"K3 Y3 D10 C2",
"T3 W4 I D2",
"K0 Y5 D6 C2",
"K1 Y6 D5 C2",
"K2 X D2 Y7 D4 C2",
"K3 X D30"),
"mseed13": ("GEOSCOPE Multiplexed Format 16 bit gain ranged, 3 bit exponent", 1,
"M0",
"W2 D0-11 A-2048",
"D12-14",
"E2:0:-1"),
"mseed14": ("GEOSCOPE Multiplexed Format 16 bit gain ranged, 4 bit exponent", 1,
"M0",
"W2 D0-11 A-2048",
"D12-15",
"E2:0:-1"),
"mseed0": ("ASCII console log", 80, ""),
"ASCII": ("ASCII console log", 80, "") }
def __init__(self):
self.__num = 0
self.__used = {}
self.__blk = []
def lookup(self, name):
k = self.__used.get(name)
if k is not None:
return k
self.__num += 1
k = self.__num
self.__used[name] = k
f = self.__formats.get(name)
if f is None:
raise SEEDError, "unknown data format: " + name
b = _Blockette30(name = f[0], key = k, family = f[1], ddl = f[2:])
self.__blk.append(b)
return k
def output(self, f):
for b in self.__blk:
b.output(f)
class _UnitDict(object):
__units = {
"COUNTS": "Digital Counts",
"COUNTS/V": "Counts per Volt",
"M": "Displacement in Meters",
"M/S": "Velocity in Meters per Second",
"M/S**2": "Acceleration in Meters per Second per Second",
"RAD/S": "Angular Velocity in Radians per Second",
"V": "Volts",
"A": "Amperes",
"PA": "Pascal",
"C": "Degree Celsius" }
def __init__(self):
self.__num = 0
self.__used = {}
self.__blk = []
def lookup(self, name):
k = self.__used.get(name)
if k is not None:
return k
self.__num += 1
k = self.__num
self.__used[name] = k
desc = self.__units.get(name)
if desc is None:
raise SEEDError, "unknown unit: " + name
b = _Blockette34(key = k, name = name, desc = desc)
self.__blk.append(b)
return k
def output(self, f):
for b in self.__blk:
b.output(f)
class _CommentDict(object):
def __init__(self):
self.__num = 0
self.__used = {}
self.__blk = []
def lookup(self, comment):
k = self.__used.get(comment)
if k is not None:
return k
self.__num += 1
k = self.__num
self.__used[comment] = k
b = _Blockette31(key = k, comment = comment)
self.__blk.append(b)
return k
def output(self, f):
for b in self.__blk:
b.output(f)
class _GenericAbbreviationDict(object):
def __init__(self, inventory):
self.__inventory = inventory
self.__num = 0
self.__used_sensor = {}
self.__used_network = {}
self.__blk = [] # blk33
def lookup_sensor(self, name): # instrument id for blk52
k = self.__used_sensor.get(name)
if k is not None:
return k
self.__num += 1
k = self.__num
self.__used_sensor[name] = k
sensor = self.__inventory.object.get(name)
if sensor is None:
raise SEEDError, "unknown sensor: " + name
self.__blk.append(_Blockette33(k, sensor.description))
return k
def lookup_network(self, code, start): # network id for blk50
k = self.__used_network.get((code, start))
if k is not None:
return k
self.__num += 1
k = self.__num
self.__used_network[(code, start)] = k
net_tp = self.__inventory.network.get(code)
if net_tp is None:
raise SEEDError, "unknown network: %s" % (code,)
netcfg = net_tp.get(start)
        if netcfg is None:
raise SEEDError, "unknown network: %s.%s" % \
(code, start.isoformat())
self.__blk.append(_Blockette33(k, netcfg.description))
return k
def output(self, f):
for b in self.__blk:
b.output(f)
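# Response containers collect the response stages of one channel.  The 4x
# variant only stores dictionary keys in a single blockette 60, while the 5x
# variant stores full blockettes (53/54/57/58/61/62) and numbers the stages
# itself.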
class _ResponseContainer(object):
def __init__(self, fac):
self.__fac = fac
def add_sensor(self, name, dev_id, compn):
(x1, x2, sens, sens_freq) = self.__fac._lookup_sensor(name,
dev_id, compn)
self._add_stage(x1, x2)
return (sens, sens_freq)
def add_analogue_paz(self, name):
(x1, x2, gain) = self.__fac._lookup_analogue_paz(name)
self._add_stage(x1, x2)
return gain
def add_digitizer(self, name, dev_id, compn, sample_rate, sample_rate_div):
(x1, x2, x3, rate, gain) = self.__fac._lookup_digitizer(name,
dev_id, compn, sample_rate, sample_rate_div)
self._add_stage(x1, x2, x3)
return (rate, gain)
def add_digital_paz(self, name, input_rate):
(x1, x2, x3, gain) = self.__fac._lookup_digital_paz(name, input_rate)
self._add_stage(x1, x2, x3)
return gain
def add_fir(self, name, input_rate):
(x1, x2, x3, rate, gain) = self.__fac._lookup_fir(name, input_rate)
self._add_stage(x1, x2, x3)
return (rate, gain)
class _Response4xContainer(_ResponseContainer):
def __init__(self, fac):
_ResponseContainer.__init__(self, fac)
self.__blk = _Blockette60(1)
def _add_stage(self, *blkref):
self.__blk.add_stage(*blkref)
def output(self, f):
self.__blk.output(f)
class _Response5xContainer(_ResponseContainer):
def __init__(self, fac):
_ResponseContainer.__init__(self, fac)
self.__stage = 1
self.__blk = []
def _add_stage(self, *blk):
for b in blk:
b.set_stage(self.__stage)
self.__blk.append(b)
self.__stage += 1
def output(self, f):
for b in self.__blk:
b.output(f)
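# Factory for dictionary (4x) responses: blockettes 41-48 are cached by name
# (plus calibration or input rate where relevant) so that identical responses
# are emitted only once per volume; the _lookup_*() methods return the
# dictionary keys together with gain information for the stage.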
class _Response4xFactory(object):
def __init__(self, inventory, unit_dict):
self.__inventory = inventory
self.__unit_dict = unit_dict
self.__num = 0
self.__used_sensor = {}
self.__used_sensor_calib = {}
self.__used_digitizer = {}
self.__used_digitizer_calib = {}
self.__used_analogue_paz = {}
self.__used_digital_paz = {}
self.__used_fir = {}
self.__used_fir_deci = {}
self.__blk41 = []
self.__blk42 = []
self.__blk43 = []
self.__blk44 = []
self.__blk47 = []
self.__blk48 = []
def new_response(self):
return _Response4xContainer(self)
def _lookup_sensor(self, name, dev_id, compn):
sensor = self.__inventory.object.get(name)
if sensor is None:
raise SEEDError, "unknown sensor: " + name
resp = self.__inventory.object.get(sensor.response)
if resp is None:
raise SEEDError, "cannot find response for sensor " + sensor.name
k1 = self.__used_sensor.get(name)
if k1 is None:
unit = None
try:
unit = sensor.unit
except AttributeError:
pass
if unit:
input_units = self.__unit_dict.lookup(unit)
elif _is_paz_response(resp) and resp.numberOfZeros == 0:
input_units = self.__unit_dict.lookup("M/S**2")
else:
input_units = self.__unit_dict.lookup("M/S")
k1 = self.__num + 1
if _is_paz_response(resp):
if resp.type != "A" and resp.type != "B":
raise SEEDError, "invalid PAZ response type of " + resp.name
b1 = _Blockette43(key = k1,
name = "RS" + name,
type = "A",
input_units = input_units,
output_units = self.__unit_dict.lookup("V"),
norm_fac = resp.normalizationFactor,
norm_freq = resp.normalizationFrequency,
nzeros = resp.numberOfZeros,
zeros = resp.zeros,
npoles = resp.numberOfPoles,
poles = resp.poles)
self.__blk43.append(b1)
elif _is_poly_response(resp):
b1 = _Blockette42(key = k1,
name = "RS" + name,
input_units = input_units,
output_units = self.__unit_dict.lookup("V"),
freq_unit = resp.frequencyUnit,
low_freq = sensor.lowFrequency,
high_freq = sensor.highFrequency,
approx_type = resp.approximationType,
approx_lower_bound = resp.approximationLowerBound,
approx_upper_bound = resp.approximationUpperBound,
approx_error = resp.approximationError,
ncoeff = resp.numberOfCoefficients,
coeff = resp.coefficients)
self.__blk42.append(b1)
self.__num += 1
self.__used_sensor[name] = k1
try:
calib = sensor.calibration[dev_id][compn]
except KeyError:
calib = None
if calib is not None and len(calib) > 0:
calib_list = calib.items()
calib_list.sort()
resp_name = "GS" + sensor.name + "_" + dev_id
gain = calib_list[-1][1].gain
else:
calib_list = []
resp_name = "GS" + sensor.name
gain = resp.gain
dev_id = None
compn = None
k2 = self.__used_sensor_calib.get((name, dev_id, compn))
if k2 is not None:
return (k1, k2, gain, resp.gainFrequency)
k2 = self.__num + 1
b2 = _Blockette48(key = k2,
name = resp_name,
gain = gain,
gain_freq = resp.gainFrequency) #,
#calib_list = calib_list)
self.__blk48.append(b2)
self.__num += 1
self.__used_sensor_calib[(name, dev_id, compn)] = k2
return (k1, k2, gain, resp.gainFrequency)
def _lookup_analogue_paz(self, name):
resp_paz = self.__inventory.object.get(name)
if resp_paz is None:
raise SEEDError, "unknown PAZ response: " + name
k = self.__used_analogue_paz.get(name)
if k is not None:
(k1, k2) = k
return (k1, k2, resp_paz.gain)
#if resp_paz.deci_fac is not None:
# raise SEEDError, "expected analogue response, found digital"
if resp_paz.type != "A" and resp_paz.type != "B":
raise SEEDError, "invalid PAZ response type of " + resp_paz.name
k1 = self.__num + 1
k2 = self.__num + 2
b1 = _Blockette43(key = k1,
name = "RA" + name,
type = "A",
input_units = self.__unit_dict.lookup("V"),
output_units = self.__unit_dict.lookup("V"),
norm_fac = resp_paz.normalizationFactor,
norm_freq = resp_paz.normalizationFrequency,
nzeros = resp_paz.numberOfZeros,
zeros = resp_paz.zeros,
npoles = resp_paz.numberOfPoles,
poles = resp_paz.poles)
b2 = _Blockette48(key = k2,
name = "GA" + name,
gain = resp_paz.gain,
gain_freq = resp_paz.gainFrequency)
self.__blk43.append(b1)
self.__blk48.append(b2)
self.__num += 2
self.__used_analogue_paz[name] = (k1, k2)
return (k1, k2, resp_paz.gain)
def _lookup_digitizer(self, name, dev_id, compn, sample_rate, sample_rate_div):
digi = self.__inventory.object.get(name)
if digi is None:
raise SEEDError, "unknown datalogger: " + name
input_rate = float(sample_rate) / float(sample_rate_div)
try:
stream_deci = digi.decimation[sample_rate][sample_rate_div]
if stream_deci.digitalFilterChain and \
len(stream_deci.digitalFilterChain) > 0:
for f in stream_deci.digitalFilterChain.split():
obj = self.__inventory.object[f]
try: # Need decimationFactor for PAZ???
input_rate *= obj.decimationFactor
except AttributeError:
pass
except KeyError:
pass
k = self.__used_digitizer.get((name, input_rate))
if k is None:
k1 = self.__num + 1
k2 = self.__num + 2
b1 = _Blockette44(key = k1,
name = "RL" + name,
type = "D",
input_units = self.__unit_dict.lookup("V"),
output_units = self.__unit_dict.lookup("COUNTS"))
b2 = _Blockette47(key = k2,
name = "DL" + name,
input_rate = input_rate,
deci_fac = 1,
deci_offset = 0,
delay = 0,
correction = 0)
self.__blk44.append(b1)
self.__blk47.append(b2)
self.__num += 2
self.__used_digitizer[(name, input_rate)] = (k1, k2)
else:
(k1, k2) = k
try:
calib = digi.calibration[dev_id][compn]
except KeyError:
calib = None
if calib is not None and len(calib) > 0:
calib_list = calib.items()
calib_list.sort()
resp_name = "GL" + digi.name + "_" + dev_id
gain = calib_list[-1][1].gain
else:
calib_list = []
resp_name = "GL" + digi.name
gain = digi.gain
dev_id = None
compn = None
k3 = self.__used_digitizer_calib.get((name, dev_id, compn))
if k3 is not None:
return (k1, k2, k3, input_rate, gain)
k3 = self.__num + 1
b3 = _Blockette48(key = k3,
name = resp_name,
gain = gain,
gain_freq = 0) #,
#calib_list = calib_list)
self.__blk48.append(b3)
self.__num += 1
self.__used_digitizer_calib[(name, dev_id, compn)] = k3
return (k1, k2, k3, input_rate, gain)
def _lookup_digital_paz(self, name, input_rate):
resp_paz = self.__inventory.object.get(name)
if resp_paz is None:
raise SEEDError, "unknown PAZ response: " + name
k = self.__used_digital_paz.get(name)
if k is not None:
(k1, k2, k3) = k
return (k1, k2, k3, resp_paz.gain)
#if resp_paz.deci_fac is None:
# raise SEEDError, "expected digital response, found analogue"
if resp_paz.type != "D":
raise SEEDError, "invalid PAZ response type of " + resp_paz.name
k1 = self.__num + 1
k2 = self.__num + 2
k3 = self.__num + 3
b1 = _Blockette43(key = k1,
name = "RD" + name,
type = "D",
input_units = self.__unit_dict.lookup("COUNTS"),
output_units = self.__unit_dict.lookup("COUNTS"),
norm_fac = resp_paz.normalizationFactor,
norm_freq = resp_paz.normalizationFrequency,
nzeros = resp_paz.numberOfZeros,
zeros = resp_paz.zeros,
npoles = resp_paz.numberOfPoles,
poles = resp_paz.poles)
b2 = _Blockette47(key = k2,
name = "DD" + name,
input_rate = input_rate,
deci_fac = 1,
deci_offset = 0,
delay = 0,
correction = 0)
b3 = _Blockette48(key = k3,
name = "GD" + name,
gain = resp_paz.gain,
gain_freq = resp_paz.gainFrequency)
self.__blk43.append(b1)
self.__blk47.append(b2)
self.__blk48.append(b3)
self.__num += 3
self.__used_digital_paz[name] = (k1, k2, k3)
return (k1, k2, k3, resp_paz.gain)
def _lookup_fir(self, name, input_rate):
resp_fir = self.__inventory.object.get(name)
if resp_fir is None:
raise SEEDError, "unknown FIR response: " + name
k = self.__used_fir.get(name)
if k is None:
k1 = self.__num + 1
k3 = self.__num + 2
b1 = _Blockette41(key = k1,
name = "RF" + name,
symmetry = resp_fir.symmetry,
input_units = self.__unit_dict.lookup("COUNTS"),
output_units = self.__unit_dict.lookup("COUNTS"),
ncoeff = resp_fir.numberOfCoefficients,
coeff = resp_fir.coefficients)
b3 = _Blockette48(key = k3,
name = "GF" + name,
gain = resp_fir.gain,
gain_freq = 0)
self.__blk41.append(b1)
self.__blk48.append(b3)
self.__num += 2
self.__used_fir[name] = (k1, k3)
else:
(k1, k3) = k
k2 = self.__used_fir_deci.get((name, input_rate))
if k2 is None:
k2 = self.__num + 1
b2 = _Blockette47(key = k2,
name = "DF" + name + "_" + str(input_rate).replace(".", "_"),
input_rate = input_rate,
deci_fac = resp_fir.decimationFactor,
deci_offset = 0,
delay = resp_fir.delay / input_rate,
correction = resp_fir.correction / input_rate)
self.__blk47.append(b2)
self.__num += 1
self.__used_fir_deci[(name, input_rate)] = k2
return (k1, k2, k3, input_rate / resp_fir.decimationFactor, resp_fir.gain)
def output(self, f):
for b in self.__blk41:
b.output(f)
for b in self.__blk42:
b.output(f)
for b in self.__blk43:
b.output(f)
for b in self.__blk44:
b.output(f)
for b in self.__blk47:
b.output(f)
for b in self.__blk48:
b.output(f)
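# Factory for inline (5x) responses: instead of dictionary keys, the
# _lookup_*() methods return freshly built blockette objects (53/54/57/58/61/62)
# that the _Response5xContainer numbers and writes per channel; output() is a
# no-op because nothing is shared between channels.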
class _Response5xFactory(object):
def __init__(self, inventory, unit_dict):
self.__inventory = inventory
self.__unit_dict = unit_dict
def new_response(self):
return _Response5xContainer(self)
def _lookup_sensor(self, name, dev_id, compn):
sensor = self.__inventory.object.get(name)
if sensor is None:
raise SEEDError, "unknown sensor: " + name
resp = self.__inventory.object.get(sensor.response)
if resp is None:
raise SEEDError, "cannot find response for sensor " + sensor.name
unit = None
try:
unit = sensor.unit
except AttributeError:
pass
if unit:
input_units = self.__unit_dict.lookup(unit)
elif _is_paz_response(resp) and resp.numberOfZeros == 0:
input_units = self.__unit_dict.lookup("M/S**2")
else:
input_units = self.__unit_dict.lookup("M/S")
if _is_paz_response(resp):
if resp.type != "A" and resp.type != "B":
raise SEEDError, "invalid PAZ response type of " + resp.name
b1 = _Blockette53(type = "A",
input_units = input_units,
output_units = self.__unit_dict.lookup("V"),
norm_fac = resp.normalizationFactor,
norm_freq = resp.normalizationFrequency,
nzeros = resp.numberOfZeros,
zeros = resp.zeros,
npoles = resp.numberOfPoles,
poles = resp.poles)
elif _is_poly_response(resp):
b1 = _Blockette62(input_units = input_units,
output_units = self.__unit_dict.lookup("V"),
freq_unit = resp.frequencyUnit,
low_freq = sensor.lowFrequency,
high_freq = sensor.highFrequency,
approx_type = resp.approximationType,
approx_lower_bound = resp.approximationLowerBound,
approx_upper_bound = resp.approximationUpperBound,
approx_error = resp.approximationError,
ncoeff = resp.numberOfCoefficients,
coeff = resp.coefficients)
try:
calib = sensor.calibration[dev_id][compn]
except KeyError:
calib = None
if calib is not None and len(calib) > 0:
calib_list = calib.items()
calib_list.sort()
gain = calib_list[-1][1].gain
else:
calib_list = []
gain = resp.gain
dev_id = None
compn = None
b2 = _Blockette58(gain = gain,
gain_freq = resp.gainFrequency) #,
#calib_list = calib_list)
return (b1, b2, gain, resp.gainFrequency)
def _lookup_analogue_paz(self, name):
resp_paz = self.__inventory.object.get(name)
if resp_paz is None:
raise SEEDError, "unknown PAZ response: " + name
if resp_paz.type != "A" and resp_paz.type != "B":
raise SEEDError, "invalid PAZ response type of " + resp_paz.name
b1 = _Blockette53(type = "A",
input_units = self.__unit_dict.lookup("V"),
output_units = self.__unit_dict.lookup("V"),
norm_fac = resp_paz.normalizationFactor,
norm_freq = resp_paz.normalizationFrequency,
nzeros = resp_paz.numberOfZeros,
zeros = resp_paz.zeros,
npoles = resp_paz.numberOfPoles,
poles = resp_paz.poles)
b2 = _Blockette58(gain = resp_paz.gain,
gain_freq = resp_paz.gainFrequency)
return (b1, b2, resp_paz.gain)
def _lookup_digitizer(self, name, dev_id, compn, sample_rate, sample_rate_div):
digi = self.__inventory.object.get(name)
if digi is None:
raise SEEDError, "unknown datalogger: " + name
input_rate = float(sample_rate) / float(sample_rate_div)
try:
stream_deci = digi.decimation[sample_rate][sample_rate_div]
if stream_deci.digitalFilterChain and \
len(stream_deci.digitalFilterChain) > 0:
for f in stream_deci.digitalFilterChain.split():
obj = self.__inventory.object[f]
try: # Need decimationFactor for PAZ???
input_rate *= obj.decimationFactor
except AttributeError:
pass
except KeyError:
pass
b1 = _Blockette54(type = "D",
input_units = self.__unit_dict.lookup("V"),
output_units = self.__unit_dict.lookup("COUNTS"))
b2 = _Blockette57(input_rate = input_rate,
deci_fac = 1,
deci_offset = 0,
delay = 0,
correction = 0)
try:
calib = digi.calibration[dev_id][compn]
except KeyError:
calib = None
if calib is not None and len(calib) > 0:
calib_list = calib.items()
calib_list.sort()
gain = calib_list[-1][1].gain
else:
calib_list = []
gain = digi.gain
dev_id = None
compn = None
b3 = _Blockette58(gain = gain,
gain_freq = 0) #,
#calib_list = calib_list)
return (b1, b2, b3, input_rate, gain)
def _lookup_digital_paz(self, name, input_rate):
resp_paz = self.__inventory.object.get(name)
if resp_paz is None:
raise SEEDError, "unknown PAZ response: " + name
if resp_paz.type != "D":
raise SEEDError, "invalid PAZ response type of " + resp_paz.name
b1 = _Blockette53(type = "D",
input_units = self.__unit_dict.lookup("COUNTS"),
output_units = self.__unit_dict.lookup("COUNTS"),
norm_fac = resp_paz.normalizationFactor,
norm_freq = resp_paz.normalizationFrequency,
nzeros = resp_paz.numberOfZeros,
zeros = resp_paz.zeros,
npoles = resp_paz.numberOfPoles,
poles = resp_paz.poles)
b2 = _Blockette57(input_rate = input_rate,
deci_fac = 1,
deci_offset = 0,
delay = 0,
correction = 0)
b3 = _Blockette58(gain = resp_paz.gain,
gain_freq = resp_paz.gainFrequency)
return (b1, b2, b3, resp_paz.gain)
def _lookup_fir(self, name, input_rate):
resp_fir = self.__inventory.object.get(name)
if resp_fir is None:
raise SEEDError, "unknown FIR response: " + name
b1 = _Blockette61(name = "RF" + name,
symmetry = resp_fir.symmetry,
input_units = self.__unit_dict.lookup("COUNTS"),
output_units = self.__unit_dict.lookup("COUNTS"),
ncoeff = resp_fir.numberOfCoefficients,
coeff = resp_fir.coefficients)
b2 = _Blockette57(input_rate = input_rate,
deci_fac = resp_fir.decimationFactor,
deci_offset = 0,
delay = resp_fir.delay / input_rate,
correction = resp_fir.correction / input_rate)
b3 = _Blockette58(gain = resp_fir.gain,
gain_freq = 0)
return (b1, b2, b3, input_rate / resp_fir.decimationFactor, resp_fir.gain)
def output(self, f):
pass
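# _Channel assembles the blockette 52 for one stream and its complete response
# chain (sensor, analogue filters, digitizer, digital filters), multiplying the
# stage gains into the overall sensitivity reported in the final blockette 58.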
class _Channel(object):
def __init__(self, inventory, strmcfg, format_dict, unit_dict,
gen_dict, resp_container):
loccfg = strmcfg.mySensorLocation
statcfg = loccfg.myStation
netcfg = statcfg.myNetwork
self.__id = (loccfg.code, strmcfg.code, strmcfg.start)
self.__resp_container = resp_container
sensor = inventory.object.get(strmcfg.sensor)
if sensor is None:
raise SEEDError, "unknown sensor: " + strmcfg.sensor
resp = inventory.object.get(sensor.response)
if resp is None:
raise SEEDError, "cannot find response for sensor " + sensor.name
digi = inventory.object.get(strmcfg.datalogger)
if digi is None:
raise SEEDError, "unknown datalogger: " + strmcfg.datalogger
try:
stream_deci = digi.decimation[strmcfg.sampleRateNumerator][strmcfg.sampleRateDenominator]
except KeyError:
raise SEEDError, "cannot find filter chain for stream " + \
str(strmcfg.sampleRateNumerator) + "/" + \
str(strmcfg.sampleRateDenominator) + " of datalogger " + \
digi.name
unit = None
try:
unit = sensor.unit
except AttributeError:
pass
if unit:
signal_units = unit_dict.lookup(unit)
elif _is_paz_response(resp) and resp.numberOfZeros == 0:
signal_units = unit_dict.lookup("M/S**2")
else:
signal_units = unit_dict.lookup("M/S")
if strmcfg.sampleRateNumerator == 0 or \
strmcfg.sampleRateDenominator == 0:
raise SEEDError, "invalid sample rate"
sample_rate = float(strmcfg.sampleRateNumerator) / \
float(strmcfg.sampleRateDenominator)
self.__chan_blk = _Blockette52(loc_id = loccfg.code,
chan_id = strmcfg.code,
instr_id = gen_dict.lookup_sensor(strmcfg.sensor),
comment = "",
signal_units = signal_units,
calibration_units = unit_dict.lookup("A"), # check!
latitude = loccfg.latitude,
longitude = loccfg.longitude,
elevation = loccfg.elevation,
local_depth = strmcfg.depth,
azimuth = strmcfg.azimuth,
dip = strmcfg.dip,
data_format = format_dict.lookup(strmcfg.format),
record_length = 12,
sample_rate = sample_rate,
clock_drift = digi.maxClockDrift / sample_rate,
flags = strmcfg.flags,
start_date = strmcfg.start,
end_date = strmcfg.end)
(sens, sens_freq) = resp_container.add_sensor(strmcfg.sensor,
strmcfg.sensorSerialNumber, strmcfg.sensorChannel)
if stream_deci.analogueFilterChain:
if len(stream_deci.analogueFilterChain) > 0:
for f in stream_deci.analogueFilterChain.split():
obj = inventory.object[f]
if _is_paz_response(obj):
gain = resp_container.add_analogue_paz(f)
sens *= gain
else:
raise SEEDError, "invalid filter type: %s (%s)" % (f, obj.name)
(rate, gain) = resp_container.add_digitizer(strmcfg.datalogger,
strmcfg.dataloggerSerialNumber, strmcfg.dataloggerChannel,
strmcfg.sampleRateNumerator, strmcfg.sampleRateDenominator)
sens *= gain
if stream_deci.digitalFilterChain:
if len(stream_deci.digitalFilterChain) > 0:
for f in stream_deci.digitalFilterChain.split():
obj = inventory.object[f]
if _is_paz_response(obj):
gain = resp_container.add_digital_paz(f, rate)
elif _is_fir_response(obj):
(rate, gain) = resp_container.add_fir(f, rate)
else:
raise SEEDError, "invalid filter type: %s (%s)" % (f, obj.name)
sens *= gain
if sens_freq > rate / 5:
sens_freq = rate / 5
#if sample_rate != rate:
# print digi.name, netcfg.code, statcfg.code, strmcfg.code, "expected sample rate", sample_rate, "actual", rate
self.__sens_blk = _Blockette58(gain = sens,
gain_freq = sens_freq)
def __cmp__(self, other):
if(self.__id < other.__id):
return -1
if(self.__id > other.__id):
return 1
return 0
def output(self, f, vol_start, vol_end):
self.__chan_blk.set_vol_span(vol_start, vol_end)
self.__chan_blk.output(f)
self.__resp_container.output(f)
self.__sens_blk.output(f)
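# _Station renders one station epoch: blockette 50, optional station comment
# blockettes 51 and the sorted list of its channels; the record number of the
# first station record is remembered for the volume index (blockette 11).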
class _Station(object):
def __init__(self, inventory, statcfg, format_dict, unit_dict,
comment_dict, gen_dict, resp_fac):
self.__inventory = inventory
self.__statcfg = statcfg
self.__format_dict = format_dict
self.__unit_dict = unit_dict
self.__comment_dict = comment_dict
self.__gen_dict = gen_dict
self.__resp_fac = resp_fac
self.__recno = 0
self.__id = (statcfg.myNetwork.code, statcfg.myNetwork.start,
statcfg.code, statcfg.start)
self.__channel = {}
self.__comment_blk = []
self.__stat_blk = _Blockette50(stat_code = statcfg.code,
latitude = statcfg.latitude,
longitude = statcfg.longitude,
elevation = statcfg.elevation,
site_name = statcfg.description,
net_id = gen_dict.lookup_network(statcfg.myNetwork.code,
statcfg.myNetwork.start),
net_code = statcfg.myNetwork.code,
start_date = statcfg.start,
end_date = statcfg.end)
def __cmp__(self, other):
if(self.__id < other.__id):
return -1
if(self.__id > other.__id):
return 1
return 0
def add_chan(self, strmcfg):
loccfg = strmcfg.mySensorLocation
if (loccfg.code, strmcfg.code, strmcfg.start) in \
self.__channel:
return
self.__channel[(loccfg.code, strmcfg.code, strmcfg.start)] = \
_Channel(self.__inventory, strmcfg, self.__format_dict,
self.__unit_dict, self.__gen_dict, self.__resp_fac.new_response())
    def add_comment(self, start_time, end_time, comment):
        self.__comment_blk.append(_Blockette51(start_time = start_time,
            end_time = end_time,
            comment_key = self.__comment_dict.lookup(comment)))
def get_id(self):
return self.__id
def get_recno(self):
return self.__recno
def output(self, f, vol_start, vol_end):
self.__recno = f.get_recno()
self.__stat_blk.output(f)
for b in self.__comment_blk:
b.output(f)
chan_list = self.__channel.values()
chan_list.sort()
for c in chan_list:
c.output(f, vol_start, vol_end)
f.flush()
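# Waveform bookkeeping: _TimeSeries tracks one trace and the record numbers of
# its first and last data records (blockette 74), _Timespan groups overlapping
# traces into one blockette 70 time span, and _WaveformData merges incoming
# MiniSEED records, buffers them in a temporary file and renumbers them when
# the volume is written.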
class _TimeSeries(object):
def __init__(self, span, net_code, stat_code, loc_id, chan_id,
start_time, end_time, recno):
self.__span = span
self.__net_code = net_code
self.__stat_code = stat_code
self.__loc_id = loc_id
self.__chan_id = chan_id
self.__start_time = start_time
self.__start_recno = recno
self.__end_time = end_time
self.__end_recno = recno
def extend(self, start_time, end_time, recno):
if start_time < self.__start_time:
self.__start_time = start_time
if end_time > self.__end_time:
self.__end_time = end_time
self.__end_recno = recno
self.__span.extend(start_time, end_time)
def get_series_data(self):
return (self.__net_code, self.__stat_code, self.__loc_id,
self.__chan_id, self.__start_time, self.__end_time)
def output(self, f, data_start):
b = _Blockette74(self.__net_code, self.__stat_code, self.__loc_id,
self.__chan_id, self.__start_time, self.__start_recno + data_start,
self.__end_time, self.__end_recno + data_start)
b.output(f)
class _Timespan(object):
def __init__(self):
self.__start_time = None
self.__end_time = None
self.__recno = 0
self.__series = []
def new_time_series(self, net, sta, loc, cha, start_time, end_time, recno):
if len(self.__series) == 0:
self.__start_time = start_time
self.__end_time = end_time
else:
self.extend(start_time, end_time)
ts = _TimeSeries(self, net, sta, loc, cha, start_time, end_time, recno)
self.__series.append(ts)
return ts
def overlap(self, start_time, end_time):
return self.__start_time - _min_ts_gap <= start_time <= self.__end_time + _min_ts_gap or \
self.__start_time - _min_ts_gap <= end_time <= self.__end_time + _min_ts_gap
def extend(self, start_time, end_time):
if start_time < self.__start_time:
self.__start_time = start_time
if end_time > self.__end_time:
self.__end_time = end_time
def get_span_data(self):
return (self.__start_time, self.__end_time, self.__recno)
def get_series_data(self):
return [ s.get_series_data() for s in self.__series ]
def output_index(self, f, data_start):
self.__recno = f.get_recno()
b = _Blockette70("P", self.__start_time, self.__end_time)
b.output(f)
for s in self.__series:
s.output(f, data_start)
f.flush()
class _WaveformData(object):
def __init__(self):
self.__fd = TemporaryFile()
self.__recno = 0
self.__cur_rec = None
self.__cur_series = None
self.__span = []
def __get_time_series(self, rec):
for s in self.__span:
if s.overlap(rec.begin_time, rec.end_time):
break
else:
s = _Timespan()
self.__span.append(s)
return s.new_time_series(rec.net, rec.sta, rec.loc, rec.cha,
rec.begin_time, rec.end_time, self.__recno)
def add_data(self, rec):
if self.__cur_rec is None:
self.__cur_rec = rec
self.__cur_series = self.__get_time_series(rec)
#if rec.encoding != 10 and rec.encoding != 11:
# logs.warning("%s %s %s %s cannot merge records with encoding %d" % \
# (rec.net, rec.sta, rec.loc, rec.cha, rec.encoding))
return
if self.__cur_rec.net == rec.net and self.__cur_rec.sta == rec.sta and \
self.__cur_rec.loc == rec.loc and self.__cur_rec.cha == rec.cha:
contiguous = True
if rec.encoding == 10 or rec.encoding == 11:
if abs(rec.begin_time - self.__cur_rec.end_time) > _min_data_gap(rec.fsamp):
contiguous = False
if rec.X_minus1 is None:
logs.warning("%s %s %s %s X[-1] not defined" %
(rec.net, rec.sta, rec.loc, rec.cha))
contiguous = False
else:
contiguous = False
if self.__cur_rec.fsamp != rec.fsamp:
logs.warning("%s %s %s %s sample rate changed from %f to %f" %
(rec.net, rec.sta, rec.loc, rec.cha, self.__cur_rec.fsamp,
rec.fsamp))
contiguous = False
if self.__cur_rec.encoding != rec.encoding:
logs.warning("%s %s %s %s encoding changed from %d to %d" %
(rec.net, rec.sta, rec.loc, rec.cha, self.__cur_rec.encoding,
rec.encoding))
contiguous = False
if contiguous and self.__cur_rec.Xn != rec.X_minus1:
logs.warning("%s %s %s %s non-contiguous data: %d != %d" %
(rec.net, rec.sta, rec.loc, rec.cha, self.__cur_rec.Xn,
rec.X_minus1))
contiguous = False
if contiguous and self.__cur_rec.size + rec.nframes * 64 <= (1 << _RECLEN_EXP):
self.__cur_rec.merge(rec)
else:
self.__recno += 1
if abs(rec.begin_time - self.__cur_rec.end_time) <= _min_ts_gap:
self.__cur_series.extend(rec.begin_time, rec.end_time,
self.__recno)
else:
self.__cur_series = self.__get_time_series(rec)
self.__cur_rec.write(self.__fd, _RECLEN_EXP)
self.__cur_rec = rec
else:
self.__recno += 1
self.__cur_series = self.__get_time_series(rec)
self.__cur_rec.write(self.__fd, _RECLEN_EXP)
self.__cur_rec = rec
def get_series_data(self):
return sum([ s.get_series_data() for s in self.__span ], [])
def output_vol(self, f):
b = _Blockette12()
for s in self.__span:
b.add_span(*s.get_span_data())
b.output(f)
def output_index(self, f, data_start):
for s in self.__span:
s.output_index(f, data_start)
def output_data(self, fd, data_start):
if self.__cur_rec is not None:
self.__cur_rec.write(self.__fd, _RECLEN_EXP)
self.__cur_rec = None
self.__cur_series = None
self.__fd.seek(0)
#copyfileobj(self.__fd, fd)
i = 0
for rec in mseed.Input(self.__fd):
rec.recno = data_start + i
rec.write(fd, _RECLEN_EXP)
i += 1
self.__fd.close()
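# _RecordBuilder packs blockettes into fixed-size logical records of
# (1 << _RECLEN_EXP) bytes.  Each record starts with a 6-digit sequence number
# and a type character; continuation records are marked with '*' and records
# are padded with spaces on flush().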
class _RecordBuilder(object):
def __init__(self, type, fd):
self.__recno = 1
self.__type = type
self.__fd = fd
self.__buf = None
def flush(self):
        if self.__buf is not None:
self.__buf += ((1 << _RECLEN_EXP) - len(self.__buf)) * " "
self.__fd.write(self.__buf)
self.__buf = None
def reset(self, type, fd, recno = None):
self.flush()
self.__type = type
self.__fd = fd
if recno is not None:
self.__recno = recno
def get_recno(self):
return self.__recno
def write_blk(self, s):
        if self.__buf is None:
self.__buf = "%06d%c " % (self.__recno, self.__type)
self.__recno += 1
b = 0
while len(s) - b > (1 << _RECLEN_EXP) - len(self.__buf):
e = b + (1 << _RECLEN_EXP) - len(self.__buf)
self.__buf += s[b:e]
self.__fd.write(self.__buf)
self.__buf = "%06d%c*" % (self.__recno, self.__type)
self.__recno += 1
b = e
self.__buf += s[b:]
if len(self.__buf) > (1 << _RECLEN_EXP) - 8:
self.flush()
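# SEEDVolume is the public entry point: add_chan() selects inventory epochs to
# include, add_data() feeds MiniSEED records, and output() writes the volume
# (header, abbreviation, station and optional time span/data sections) to a
# file name or file-like object.  resp_dict chooses between dictionary (4x)
# and inline (5x) response blockettes.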
class SEEDVolume(object):
def __init__(self, inventory, organization, label, resp_dict=True):
self.__inventory = inventory
self.__organization = organization
self.__label = label
self.__vol_start_time = datetime.datetime(2100,1,1,0,0,0)
self.__vol_end_time = datetime.datetime(1971,1,1,0,0,0)
self.__format_dict = _FormatDict()
self.__unit_dict = _UnitDict()
self.__comment_dict = _CommentDict()
self.__gen_dict = _GenericAbbreviationDict(inventory)
self.__station = {}
self.__waveform_data = None
if resp_dict:
self.__resp_fac = _Response4xFactory(inventory, self.__unit_dict)
else:
self.__resp_fac = _Response5xFactory(inventory, self.__unit_dict)
def add_chan(self, net_code, stat_code, loc_id, chan_id, start_time, end_time, strict=False):
found = False
net_tp = self.__inventory.network.get(net_code)
if net_tp is not None:
for netcfg in net_tp.itervalues():
# if _cmptime(start_time, netcfg.end) <= 0 and \
# _cmptime(end_time, netcfg.start) >= 0:
sta_tp = netcfg.station.get(stat_code)
if sta_tp is not None:
for statcfg in sta_tp.itervalues():
# if _cmptime(start_time, statcfg.end) <= 0 and \
# _cmptime(end_time, statcfg.start) >= 0:
sta = self.__station.get((net_code, netcfg.start, stat_code, statcfg.start))
if sta is None:
sta = _Station(self.__inventory, statcfg, self.__format_dict,
self.__unit_dict, self.__comment_dict, self.__gen_dict,
self.__resp_fac)
self.__station[(net_code, netcfg.start, stat_code, statcfg.start)] = sta
loc_tp = statcfg.sensorLocation.get(loc_id)
if loc_tp is not None:
for loccfg in loc_tp.itervalues():
# if _cmptime(start_time, strmcfg.end) <= 0 and \
# _cmptime(end_time, strmcfg.start) >= 0:
strm_tp = loccfg.stream.get(chan_id)
if strm_tp is not None:
for strmcfg in strm_tp.itervalues():
if _cmptime(start_time, strmcfg.end) <= 0 and \
_cmptime(end_time, strmcfg.start) >= 0:
if _cmptime(start_time, self.__vol_start_time) < 0:
self.__vol_start_time = start_time
if _cmptime(end_time, self.__vol_end_time) > 0:
self.__vol_end_time = end_time
sta.add_chan(strmcfg)
found = True
if not found:
if strict:
raise SEEDError, "cannot find %s %s %s %s %s %s" % \
(net_code, stat_code, loc_id, chan_id, start_time, end_time)
else:
logs.warning("cannot find %s %s %s %s %s %s" %
(net_code, stat_code, loc_id, chan_id, start_time, end_time))
def add_station_comment(self, net_code, stat_code, start_time, end_time, comment, strict=False):
found = False
net_tp = self.__inventory.network.get(net_code)
if net_tp is not None:
for netcfg in net_tp.itervalues():
# if _cmptime(start_time, netcfg.end) <= 0 and \
# _cmptime(end_time, netcfg.start) >= 0:
sta_tp = netcfg.station.get(stat_code)
if sta_tp is not None:
for statcfg in sta_tp.itervalues():
# if _cmptime(start_time, statcfg.end) <= 0 and \
# _cmptime(end_time, statcfg.start) >= 0:
sta = self.__station.get((net_code, netcfg.start, stat_code, statcfg.start))
if sta is None:
sta = _Station(self.__inventory, statcfg, self.__format_dict,
                                self.__unit_dict, self.__comment_dict, self.__gen_dict,
                                self.__resp_fac)
self.__station[(net_code, netcfg.start, stat_code, statcfg.start)] = sta
sta.add_comment(start_time, end_time, comment)
found = True
if not found:
if strict:
raise SEEDError, "cannot find %s %s %s %s %s %s" % \
(net_code, stat_code, loc_id, chan_id, start_time, end_time)
else:
logs.warning("cannot find %s %s %s %s %s %s" %
(net_code, stat_code, loc_id, chan_id, start_time, end_time))
def add_data(self, rec):
if self.__waveform_data is None:
self.__waveform_data = _WaveformData()
self.__waveform_data.add_data(rec)
def __output_vol(self, vol_creat_time, sta_list, rb):
b1 = _Blockette10(record_length = _RECLEN_EXP,
start_time = self.__vol_start_time,
end_time = self.__vol_end_time,
vol_time = vol_creat_time,
organization = self.__organization,
label = self.__label)
b2 = _Blockette11()
for sta in sta_list:
(net_code, net_start, stat_code, stat_start) = sta.get_id()
b2.add_station(stat_code, sta.get_recno())
b1.output(rb)
b2.output(rb)
if self.__waveform_data is not None:
self.__waveform_data.output_vol(rb)
rb.flush()
def output(self, dest, strict=False):
vol_creat_time = datetime.datetime.utcnow()
if self.__waveform_data is not None:
for (net_code, stat_code, loc_id, chan_id, start_time, end_time) in \
self.__waveform_data.get_series_data():
self.add_chan(net_code, stat_code, loc_id, chan_id, start_time, \
end_time, strict)
sta_list = self.__station.values()
sta_list.sort()
if isinstance(dest, basestring):
fd = file(dest, "w")
elif hasattr(dest, "write"):
fd = dest
else:
raise TypeError, "invalid file object"
try:
filename = fd.name
except AttributeError:
filename = '<???>'
rb = _RecordBuilder("V", fd)
self.__output_vol(vol_creat_time, sta_list, rb)
rb.reset("A", fd)
self.__format_dict.output(rb)
self.__gen_dict.output(rb)
        self.__unit_dict.output(rb)
        self.__comment_dict.output(rb)
self.__resp_fac.output(rb)
rb.flush()
rb.reset("S", fd)
for sta in sta_list:
sta.output(rb, self.__vol_start_time, self.__vol_end_time)
if self.__waveform_data is not None:
index_start = rb.get_recno()
rb.reset("T", fd)
self.__waveform_data.output_index(rb, 0)
data_start = rb.get_recno()
rb.reset("T", fd, index_start)
fd.seek((1 << _RECLEN_EXP) * (index_start - 1), 0)
self.__waveform_data.output_index(rb, data_start)
self.__waveform_data.output_data(fd, data_start)
fd.seek(0, 0)
rb.reset("V", fd, 1)
self.__output_vol(vol_creat_time, sta_list, rb)
if isinstance(dest, basestring):
fd.close()
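# Example usage (sketch): building a dataless volume.  The inventory object,
# codes and time span below are hypothetical placeholders, not values defined
# in this module.
#
#   vol = SEEDVolume(inventory, "MY ORGANIZATION", "test volume", resp_dict=True)
#   vol.add_chan("XX", "STA01", "", "BHZ",
#                datetime.datetime(2010, 1, 1), datetime.datetime(2011, 1, 1))
#   vol.output("station.dataless")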
| IPGP/webobs | CODE/bin/arclinkfetch/share/seiscomp3af/seiscomp/fseed.py | Python | gpl-3.0 | 82,130 |
"""Managing infections for deployments"""
from . import subset_sum
from .infection import InfectionControl, Infector, CDC
__all__ = ["InfectionControl", "Infector", "CDC", "subset_sum"]
| stephen-khan/feature_infection | feature_infection/__init__.py | Python | unlicense | 188 |
#!/usr/bin/env python
#
# Copyright (C) 2009, 2010, 2011 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
"""
Script that imports po files from a tar archive exported from Launchpad.
"""
from subprocess import Popen
import re
import shutil
import os.path
import glob
import tempfile
import sys
if len(sys.argv) != 2:
print("Usage: python import-po-from-launchpad-export.py /path/to/launchpad-export.tar.gz")
raise SystemExit()
# extract from
archive_path = sys.argv[1]
print(archive_path)
# extract to
tmp_dir = tempfile.mkdtemp()
print(tmp_dir)
# extract
Popen(["tar", "xvvz", "-C", tmp_dir, "--file", archive_path]).wait()
# copy po-files
for pofile in glob.glob(os.path.join(tmp_dir, "timeline", "*.po")):
dest_name = re.search(r".*-(.*.po)", pofile).group(1)
dest = os.path.join(os.path.join(os.path.dirname(__file__), dest_name))
shutil.copy(pofile, dest)
    print(dest)
# remove tmp dir
shutil.rmtree(tmp_dir)
| linostar/timeline-clone | po/import-po-from-launchpad-export.py | Python | gpl-3.0 | 1,586 |
#!/usr/bin/env python
import os
import django
from os import path
from django.conf import settings
from django.core.management import call_command
def main():
if not settings.configured:
module_root = path.dirname(path.realpath(__file__))
settings.configure(
DEBUG = False,
INSTALLED_APPS = (
'fluent_contents',
),
)
if django.VERSION >= (1,7):
django.setup()
makemessages()
def makemessages():
os.chdir('fluent_contents')
call_command('makemessages', locale=('en', 'nl'), verbosity=1)
if __name__ == '__main__':
main()
| ixc/django-fluent-contents | makemessages.py | Python | apache-2.0 | 634 |
""" Management command to cleanup old waiting enrollments """
from __future__ import absolute_import, unicode_literals
import logging
from django.core.management.base import BaseCommand
from ... import tasks
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Deletes enrollments not tied to a user that have not been modified
for at least 60 days.
Example usage:
$ ./manage.py lms expire_waiting_enrollments
"""
help = 'Remove expired enrollments that have not been linked to a user.'
WAITING_ENROLLMENTS_EXPIRATION_DAYS = 60
def add_arguments(self, parser):
parser.add_argument(
'--expiration_days',
help='Number of days before a waiting enrollment is considered expired',
default=self.WAITING_ENROLLMENTS_EXPIRATION_DAYS,
type=int
)
def handle(self, *args, **options):
expiration_days = options.get('expiration_days')
logger.info('Deleting waiting enrollments unmodified for %s days', expiration_days)
tasks.expire_waiting_enrollments(expiration_days)
| ESOedX/edx-platform | lms/djangoapps/program_enrollments/management/commands/expire_waiting_enrollments.py | Python | agpl-3.0 | 1,114 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import os
import random
import alembic.autogenerate
import alembic.migration
import alembic.operations
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
from oslo_db.sqlalchemy import utils as db_utils
from oslo_log import log as logging
import sqlalchemy
from sqlalchemy.sql import null
from nova.db.sqlalchemy import api as db_session
from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _
INIT_VERSION = {}
INIT_VERSION['main'] = 215
INIT_VERSION['api'] = 0
_REPOSITORY = {}
LOG = logging.getLogger(__name__)
def get_engine(database='main'):
if database == 'main':
return db_session.get_engine()
if database == 'api':
return db_session.get_api_engine()
def _db_sync_locked(repository):
engine = get_engine()
try:
migrate_version = db_utils.get_table(engine, repository.version_table)
except sqlalchemy.exc.NoSuchTableError:
# Table doesn't exist yet, cannot be locked
return False
row = sqlalchemy.sql.select([migrate_version]).\
where(migrate_version.c.repository_id == repository.id).\
execute().fetchone()
if not row or 'locked' not in row:
# 'db expand' will create row if missing and it will add locked
# column if missing. If 'db expand' hasn't been run, then the
# repo can't be locked
return False
return row['locked']
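# The 'locked' flag in migrate_version is set by db_expand() and cleared by
# db_contract(); while it is set, db_sync() refuses to run (see db_sync,
# db_expand and db_contract below).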
def _set_db_sync_lock(repository, locked):
locked = locked and 1 or 0
engine = get_engine()
migrate_version = db_utils.get_table(engine, repository.version_table)
return migrate_version.update().\
where(migrate_version.c.repository_id == repository.id).\
values(locked=locked).execute().rowcount
def _set_db_sync_version(repository, version):
engine = get_engine()
migrate_version = db_utils.get_table(engine, repository.version_table)
migrate_version.update().\
where(migrate_version.c.repository_id == repository.id).\
values(version=version).execute()
def db_sync(version=None, database='main'):
if version is not None:
try:
version = int(version)
except ValueError:
raise exception.NovaException(_("version should be an integer"))
current_version = db_version(database)
repository = _find_migrate_repo(database)
if _db_sync_locked(repository):
msg = _("Cannot run 'db sync' until 'db contract' is run")
raise exception.DatabaseMigrationError(reason=msg)
if version is None or version > current_version:
return versioning_api.upgrade(get_engine(database), repository,
version)
else:
return versioning_api.downgrade(get_engine(database), repository,
version)
def db_version(database='main'):
repository = _find_migrate_repo(database)
try:
return versioning_api.db_version(get_engine(database), repository)
except versioning_exceptions.DatabaseNotControlledError as exc:
meta = sqlalchemy.MetaData()
engine = get_engine(database)
meta.reflect(bind=engine)
tables = meta.tables
if len(tables) == 0:
db_version_control(INIT_VERSION[database], database)
return versioning_api.db_version(get_engine(database), repository)
else:
LOG.exception(exc)
# Some pre-Essex DB's may not be version controlled.
# Require them to upgrade using Essex first.
raise exception.NovaException(
_("Upgrade DB using Essex release first."))
def db_initial_version(database='main'):
return INIT_VERSION[database]
def _ignore_table(name):
# Anything starting with dump_ is a backup from previous
# migration scripts.
return name.startswith('dump_')
def _include_object(object_, name, type_, reflected, compare_to):
if type_ == 'table':
return not _ignore_table(name)
return True
_REFLECT_CACHE = {}
def _compare_type(context, inspector_column, metadata_column,
inspector_type, metadata_type):
# Types can be silently modified on the server side. For instance,
# under MySQL, the "BOOL" type is an alias to "TINYINT". As a result,
# creating a column with "BOOL" will reflect as "TINYINT". Instead of
# manually maintaining a mapping of types per database engine, let's
# create a temporary table with metadata_type, reflect that, then
# compare the two reflected values to see if they match.
# Check with the alembic implementation first. I haven't seen a false
# negative yet (where it considers the types the same when they really
# aren't), but there are some false positives dealing with aliasing
# of types and some metadata.
if not context.impl.compare_type(inspector_column, metadata_column):
return False
key = str(metadata_type)
reflected_type = _REFLECT_CACHE.get(key)
if reflected_type is None:
conn = context.bind.connect()
tmp_meta = sqlalchemy.MetaData(conn)
randchars = ''.join(chr(random.randint(0, 255)) for _ in xrange(5))
tmp_table_name = 'reflect_' + base64.b32encode(randchars).lower()
tmp_table = sqlalchemy.Table(tmp_table_name, tmp_meta,
sqlalchemy.Column('a', metadata_type),
prefixes=['TEMPORARY'])
tmp_table.create()
inspect = sqlalchemy.inspect(conn)
columns = inspect.get_columns(tmp_table_name)
tmp_table.drop()
column_types = {c['name']: c['type'] for c in columns}
reflected_type = column_types['a']
_REFLECT_CACHE[key] = reflected_type
# Now compare the types
if inspector_type.__class__ is not reflected_type.__class__:
return True
# And compare any specific attributes about this type
# TODO(johannes): This should handle dialect specific attributes
# (eg charset, collation, etc on MySQL VARCHAR type). This is
# probably best done in alembic correctly
for attr in ('length', 'display_width'):
if (hasattr(inspector_type, attr) and
getattr(inspector_type, attr) != getattr(reflected_type, attr)):
return True
return False
def _create_migration_context(as_sql=False):
engine = get_engine()
opts = {
'include_object': _include_object,
'compare_type': _compare_type,
'as_sql': as_sql,
}
return alembic.migration.MigrationContext.configure(engine, opts=opts)
class OperationBase(object):
# Be conservative by default
desired_phase = 'migrate'
removes = False
def __init__(self):
self.provides = set()
self.depends = set()
self.conflicts = set()
class AddTable(OperationBase):
# Always safe since no old code should be using this table
desired_phase = 'expand'
def __init__(self, table):
super(AddTable, self).__init__()
self.table = table
self.provides = set(('column', table.name, c.name)
for c in table.columns)
def execute(self, ddlop):
# Need to copy columns so they are unlinked from metadata table
columns = [c.copy() for c in self.table.columns]
ddlop.create_table(self.table.name, *columns,
mysql_engine='InnoDB',
mysql_charset='utf8')
def __repr__(self):
return '<AddTable table_name=%s>' % self.table.name
class DropTable(OperationBase):
# Always safe since no new code should be using this table
desired_phase = 'contract'
removes = True
def __init__(self, table):
super(DropTable, self).__init__()
self.table = table
provides = set(('column', table.name, c.name)
for c in table.columns)
# Indexes and constraints are implicitly dropped for a DROP TABLE.
# Add the indexcol so foreign keys get ordered correctly
for index in table.indexes:
column_names = [c.name for c in index.columns]
provides.add(('indexcol', table.name, column_names[0]))
for constraint in table.constraints:
if not isinstance(constraint, (sqlalchemy.UniqueConstraint,
sqlalchemy.PrimaryKeyConstraint)):
continue
column_names = [c.name for c in constraint.columns]
# SQLAlchemy can add a PrimaryKeyConstraint even if one
# doesn't exist. In that case, column_names will be empty
if column_names:
provides.add(('indexcol', table.name, column_names[0]))
self.provides = provides
def execute(self, ddlop):
ddlop.drop_table(self.table.name)
def __repr__(self):
return '<DropTable table_name=%r>' % self.table.name
class AddColumn(OperationBase):
def __init__(self, table_name, column, desired_phase=None):
super(AddColumn, self).__init__()
self.table_name = table_name
self.column = column
if desired_phase:
self.desired_phase = desired_phase
self.provides = set([('column', table_name, column.name)])
def execute(self, ddlop):
column = self.column.copy()
ddlop.add_column(self.table_name, column)
def __repr__(self):
return ('<AddColumn column={table_name=%s column_name=%s type=%r}>' %
(self.table_name, self.column.name, self.column.type))
class AlterColumn(OperationBase):
def __init__(self, table_name, column_name, args):
super(AlterColumn, self).__init__()
self.table_name = table_name
self.column_name = column_name
self.args = args
self.provides = [('column', table_name, column_name)]
# Cannot alter column with foreign key
self.conflicts = [('fkcol', table_name, column_name)]
def execute(self, ddlop):
ddlop.alter_column(self.table_name, self.column_name, **self.args)
def __repr__(self):
return ('<AlterColumn table_name=%s column_name=%s args=%r>' %
(self.table_name, self.column_name, self.args))
class DropColumn(OperationBase):
# Always online safe since no new code should be using this column
desired_phase = 'contract'
removes = True
def __init__(self, table_name, column):
super(DropColumn, self).__init__()
self.table_name = table_name
self.column = column
self.provides = set([('column', table_name, column.name)])
def execute(self, ddlop):
ddlop.drop_column(self.table_name, self.column.name)
def __repr__(self):
return ('<DropColumn column={table_name=%s column_name=%s}>' %
(self.table_name, self.column.name))
class AddIndex(OperationBase):
def __init__(self, index, args):
super(AddIndex, self).__init__()
table_name = index.table.name
column_names = [c.name for c in index.columns]
self.index = index
self.args = args
# Adding a unique index isn't semantically safe since code may
# not be aware of the new constraint on the column(s).
self.desired_phase = 'migrate' if index.unique else 'expand'
self.provides = set([
('index', table_name, index.name),
('indexcol', table_name, column_names[0]),
])
# Columns need to exist before index is created
self.depends = set(('column', table_name, name)
for name in column_names)
def execute(self, ddlop):
name = self.index.name
table_name = self.index.table.name
column_names = [c.name for c in self.index.columns]
ddlop.create_index(name, table_name, column_names,
unique=self.index.unique, **self.args)
def __repr__(self):
index = self.index
column_names = [c.name for c in index.columns]
return ('<AddIndex index={table_name=%s name=%s column_names=(%s)} '
'args=%r>' % (index.table.name, index.name,
', '.join(column_names), self.args))
class DropIndex(OperationBase):
removes = True
def __init__(self, index):
super(DropIndex, self).__init__()
self.index = index
# This is used for conflicts
self.column_names = [c.name for c in index.columns]
# Removing a unique index should happen in migrate since
# new code may assume there isn't any restriction anymore.
self.desired_phase = 'migrate' if index.unique else 'contract'
table_name = index.table.name
self.provides = set([
('index', table_name, index.name),
('indexcol', table_name, self.column_names[0]),
])
# Can't remove an index if there is a FK potentially using it
self.conflicts = set(('fkcol', table_name, name)
for name in self.column_names)
def execute(self, ddlop):
ddlop.drop_index(self.index.name, self.index.table.name)
def __repr__(self):
index = self.index
return ('<DropIndex index={table_name=%s name=%s}>' %
(index.table.name, index.name))
class AddUniqueConstraint(OperationBase):
def __init__(self, uc, desired_phase=None):
super(AddUniqueConstraint, self).__init__()
self.uc = uc
if desired_phase:
self.desired_phase = desired_phase
table = uc.table
column_names = [c.name for c in uc.columns]
self.provides = set([
            # So a DropUniqueConstraint gets ordered correctly against this add
('uc', table.name, uc.name),
('indexcol', table.name, column_names[0]),
])
# Columns need to exist before constraint is created
self.depends = set(('column', table.name, c.name)
for c in uc.columns)
def execute(self, ddlop):
uc = self.uc
table = uc.table
column_names = [c.name for c in uc.columns]
ddlop.create_unique_constraint(uc.name, table.name, column_names)
def __repr__(self):
uc = self.uc
column_names = [c.name for c in uc.columns]
return ('<AddUniqueConstraint uc={name=%s table_name=%s '
'column_names=(%s)}>' % (uc.name, uc.table.name,
', '.join(column_names)))
class DropUniqueConstraint(OperationBase):
removes = True
def __init__(self, uc):
super(DropUniqueConstraint, self).__init__()
self.uc = uc
table = uc.table
# So this gets ordered against Add correctly
self.provides = set([('uc', table.name, uc.name)])
# Should be scheduled before any columns dropped
self.depends = set(('column', table.name, c.name)
for c in uc.columns)
def execute(self, ddlop):
uc = self.uc
table = uc.table
ddlop.drop_constraint(uc.name, table.name, type_='unique')
def __repr__(self):
uc = self.uc
column_names = [c.name for c in uc.columns]
return ('<DropUniqueConstraint uc={name=%s table_name=%s '
'column_names=(%s)}>' % (uc.name, uc.table.name,
', '.join(column_names)))
class AddForeignKey(OperationBase):
def __init__(self, fkc, desired_phase=None):
super(AddForeignKey, self).__init__()
self.fkc = fkc
if desired_phase:
self.desired_phase = desired_phase
fk = fkc.elements[0]
src_table_name = fk.parent.table.name
ref_table_name = fk.column.table.name
provides = set([('fk', src_table_name, fkc.name)])
depends = set([
('indexcol', src_table_name, fk.parent.name),
('indexcol', ref_table_name, fk.column.name),
])
for fk in fkc.elements:
provides.update([
('fkcol', src_table_name, fk.parent.name),
('fkcol', ref_table_name, fk.column.name),
])
depends.update([
('column', src_table_name, fk.parent.name),
('column', ref_table_name, fk.column.name),
])
self.provides = provides
self.depends = depends
def execute(self, ddlop):
fkc = self.fkc
src_table_name = fkc.elements[0].parent.table.name
src_column_names = [fk.parent.name for fk in fkc.elements]
ref_table_name = fkc.elements[0].column.table.name
ref_column_names = [fk.column.name for fk in fkc.elements]
ddlop.create_foreign_key(fkc.name,
src_table_name, ref_table_name,
src_column_names, ref_column_names)
def __repr__(self):
fkc = self.fkc
src_table_name = fkc.elements[0].parent.table.name
src_column_names = [fk.parent.name for fk in fkc.elements]
ref_table_name = fkc.elements[0].column.table.name
ref_column_names = [fk.column.name for fk in fkc.elements]
return ('<AddForeignKey fk={name=%r src_columns=%s.(%s) '
'ref_columns=%s.(%s)}>' % (fkc.name, src_table_name,
', '.join(src_column_names), ref_table_name,
', '.join(ref_column_names)))
class DropForeignKey(OperationBase):
removes = True
def __init__(self, fkc, desired_phase=None):
super(DropForeignKey, self).__init__()
self.fkc = fkc
if desired_phase:
self.desired_phase = desired_phase
fk = fkc.elements[0]
src_table_name = fk.parent.table.name
ref_table_name = fk.column.table.name
provides = set([('fk', src_table_name, fkc.name)])
depends = set([
('indexcol', src_table_name, fk.parent.name),
('indexcol', ref_table_name, fk.column.name),
])
for fk in fkc.elements:
provides.update([
('fkcol', src_table_name, fk.parent.name),
('fkcol', ref_table_name, fk.column.name),
])
self.provides = provides
self.depends = depends
def execute(self, ddlop):
fkc = self.fkc
table = fkc.table
ddlop.drop_constraint(fkc.name, table.name, type_='foreignkey')
def __repr__(self):
fkc = self.fkc
src_table_name = fkc.elements[0].parent.table.name
src_column_names = [fk.parent.name for fk in fkc.elements]
ref_table_name = fkc.elements[0].column.table.name
ref_column_names = [fk.column.name for fk in fkc.elements]
return ('<DropForeignKey fkc={name=%s src_columns=%s.(%s) '
'ref_columns=%s.(%s)}>' %
(fkc.name, src_table_name, ', '.join(src_column_names),
ref_table_name, ', '.join(ref_column_names)))
def _table_fk_constraints(table):
return [c for c in table.constraints
if isinstance(c, sqlalchemy.ForeignKeyConstraint)]
def _fkc_matches_key(metadata, ckey):
for table in metadata.tables.values():
for fkc in _table_fk_constraints(table):
fk = fkc.elements[0]
src_table_name = fk.parent.table.name
ref_table_name = fk.column.table.name
for fk in fkc.elements:
for key in [('fkcol', src_table_name, fk.parent.name),
('fkcol', ref_table_name, fk.column.name)]:
if key == ckey:
yield fkc
break
def _compare_fkc(afkc, bfkc):
# Comparing name is best, but new foreign key constraints might not
# have a name set yet
if afkc.name != bfkc.name:
return False
afk = afkc.elements[0]
bfk = bfkc.elements[0]
if afk.parent.table.name != bfk.parent.table.name:
return False
acolumns = [(fk.parent.name, fk.column.name) for fk in afkc.elements]
bcolumns = [(fk.parent.name, fk.column.name) for fk in bfkc.elements]
if acolumns != bcolumns:
return False
return True
class Converter(object):
def _handle_add_table(self, table):
# ('add_table', Table)
# alembic can take some operations as part of op.create_table()
# but not all. We also want to separate foreign keys since they
# can potentially create a dependency on another op we haven't
# seen yet. As a result, this one diff from alembic might be
# split up into multiple ops we track and apply in different
# phases.
tblop = AddTable(table)
yield tblop
for uc in [c for c in table.constraints
if isinstance(c, sqlalchemy.UniqueConstraint)]:
yield AddUniqueConstraint(uc, desired_phase=tblop.desired_phase)
for fkc in _table_fk_constraints(table):
yield AddForeignKey(fkc, desired_phase=tblop.desired_phase)
def _handle_remove_table(self, table):
# ('remove_table', Table)
tblop = DropTable(table)
yield tblop
for fkc in _table_fk_constraints(table):
yield DropForeignKey(fkc, desired_phase=tblop.desired_phase)
def _handle_add_column(self, schema, table_name, column):
# ('add_column', schema, table_name, Column)
kwargs = {}
if table_name == 'migrate_version':
# The column added to migrate_version needs to exist after the
# expand phase runs so locking out 'db sync' can happen.
kwargs['desired_phase'] = 'expand'
yield AddColumn(table_name, column, **kwargs)
def _handle_remove_column(self, schema, table_name, column):
# ('remove_column', schema, table_name, Column)
yield DropColumn(table_name, column)
def _handle_add_constraint(self, constraint):
# ('add_constraint', UniqueConstraint)
if not isinstance(constraint, sqlalchemy.UniqueConstraint):
raise ValueError('Unknown constraint type %r' % constraint)
yield AddUniqueConstraint(constraint)
def _handle_remove_constraint(self, constraint):
# ('remove_constraint', Constraint)
if not isinstance(constraint, sqlalchemy.UniqueConstraint):
raise ValueError('Unknown constraint type %r' % constraint)
yield DropUniqueConstraint(constraint)
def _handle_add_index(self, index):
# ('add_index', Index)
# Include any dialect specific options (mysql_length, etc)
args = {}
for dialect, options in index.dialect_options.items():
for k, v in options.items():
args['%s_%s' % (dialect, k)] = v
yield AddIndex(index, args)
def _handle_remove_index(self, index):
# ('remove_index', Index)
yield DropIndex(index)
def _handle_add_fk(self, fkc):
# ('add_fk', ForeignKeyConstraint)
yield AddForeignKey(fkc)
def _handle_remove_fk(self, fkc):
# ('remove_fk', ForeignKeyConstraint)
yield DropForeignKey(fkc)
def _column_changes(self, diffs):
# Column change (type, nullable, etc)
table_name = diffs[0][2]
column_name = diffs[0][3]
args = {}
for diff in diffs:
cmd = diff[0]
if cmd == 'modify_nullable':
# ('modify_nullable', None, table_name, column_name,
# {'existing_server_default': None,
# 'existing_type': VARCHAR(length=36)},
# conn_nullable, metadata_nullable)
existing_type = diff[4]['existing_type']
nullable = diff[6]
args['existing_type'] = existing_type
args['nullable'] = nullable
elif cmd == 'modify_type':
# ('modify_type', None, table_name, column_name,
# {'existing_nullable': True,
# 'existing_server_default': None},
# TINYINT(display_width=1), Boolean())
existing_nullable = diff[4]['existing_nullable']
new_type = diff[6]
if 'nullable' not in args:
args['nullable'] = existing_nullable
args['type_'] = new_type
else:
msg = _('Unknown alembic cmd %s') % cmd
raise exception.DatabaseMigrationError(reason=msg)
yield AlterColumn(table_name, column_name, args)
def convert_alembic(self, diffs):
ops = []
for diff in diffs:
# Parse out the format into something easier to use than the
# tuple/list format that alembic returns
if isinstance(diff, list):
ret = self._column_changes(diff)
else:
cmd = diff[0]
handler = getattr(self, '_handle_%s' % cmd, None)
if handler is None:
msg = _('Unknown alembic cmd %s') % cmd
raise exception.DatabaseMigrationError(reason=msg)
ret = handler(*diff[1:])
ops.extend(list(ret))
return ops
class Scheduler(object):
def __init__(self, ops=None):
# Set of operations (vertexes)
self.ops = set()
# Operations that have conflicts to process
self.conflictops = set()
# Indirect mapping of operations
self.exists = {}
self.nonexists = {}
# Dependencies and conflicts per op (resolve via mapping)
self.depends = {}
self.conflicts = {}
# Edges per op
self.outbound = {}
self.inbound = {}
if ops is not None:
for op in ops:
self.add(op)
def handle_conflicts(self, metadata):
# Foreign keys can make certain operations fail. The foreign key
# needs to be removed before the operation and then recreated
# after the operation.
#
# This finds all foreign keys that currently exist and determines
# if they could conflict, then it finds any operations that are
# already in the schedule. If appropriate operations don't exist,
# then they are created.
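        # For example, an AlterColumn on a column covered by an existing
        # foreign key declares that key as a conflict; the key has to be
        # dropped before the ALTER and recreated afterwards, so a
        # DropForeignKey/AddForeignKey pair is synthesized here if the
        # schedule does not already contain one.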
for op in self.conflictops:
for key in self.conflicts[op]:
for fkc in _fkc_matches_key(metadata, key):
# Find any ops that match this key
dropops = self.nonexists.get(key, [])
dropops = [op for op in dropops
if _compare_fkc(fkc, op.fkc)]
addops = self.exists.get(key, [])
addops = [op for op in addops
if _compare_fkc(fkc, op.fkc)]
if not dropops and not addops:
# No drop or add operations for this FK,
# so create some
self.add(DropForeignKey(fkc))
self.add(AddForeignKey(fkc))
# Ensure operation gets scheduled between the drop and add operations
for op in self.conflictops:
for key in self.conflicts[op]:
dropops = self.nonexists.get(key, [])
addops = self.exists.get(key, [])
for dropop in dropops:
self.add_edge(op, dropop)
for addop in addops:
self.add_edge(addop, op)
def add(self, op):
self.ops.add(op)
self.inbound[op] = set()
self.outbound[op] = set()
if op.removes:
mapping = self.nonexists
else:
mapping = self.exists
for key in op.provides:
mapping.setdefault(key, set()).add(op)
self.depends[op] = op.depends
if op.conflicts:
self.conflicts[op] = op.conflicts
self.conflictops.add(op)
def add_edge(self, f, t):
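        # add_edge(f, t) records that f depends on t: sort() will place t
        # before f in the resulting order.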
self.inbound[t].add(f)
self.outbound[f].add(t)
def sort(self):
# The topological sort modifies inbound, but we can't do a deepcopy
# since that would deepcopy the key too.
inbound = {}
for key, depends in self.inbound.items():
inbound[key] = set(depends)
toprocess = [v for v in self.ops if not inbound[v]]
inorder = []
while toprocess:
op = toprocess.pop(0)
inorder.insert(0, op)
for depop in self.outbound[op]:
inbound[depop].remove(op)
if not inbound[depop]:
toprocess.insert(0, depop)
del inbound[op]
# Anything remaining in inbound is a dependency loop
if inbound:
msg = _('Dependency loop exists in database migrations')
raise exception.DatabaseMigrationError(reason=msg)
return inorder
def order_drop_add(self):
# Alembic will emit drop/add for indexes if the covered columns change.
# Ensure that the add is scheduled after the drop.
keys = set(self.exists.keys()) & set(self.nonexists.keys())
for key in keys:
dropops = self.nonexists[key]
addops = self.exists[key]
for dropop in dropops:
for addop in addops:
self.add_edge(addop, dropop)
def schedule(self):
        # Scheduling tries to run as many of the schema changes as possible
        # while services are still running, without affecting those services;
        # this is otherwise known as running the schema changes online.
#
# There are two major factors used:
# 1) Is the schema change compatible with running code? Adding a new
# table is since no code knows about it, but changing a column type
# may not be.
# 2) Does the DDL statement cause the database engine to block access
# to the table and affect running services? This can vary greatly
# depending on the database software (MySQL, PostgreSQL, etc),
# version (5.1, 5.5, 5.6, etc) and the storage engine (MyISAM,
# InnoDB, etc)
#
# Also, dependencies between operations might keep an operation that
# would otherwise be safe to be run online from being run online.
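        # In practice this means that e.g. AddTable and non-unique AddIndex
        # default to 'expand', DropTable/DropColumn and non-unique DropIndex
        # default to 'contract', and conservative operations such as
        # AddColumn and AlterColumn stay in the offline 'migrate' phase.
        # An 'expand' or 'contract' candidate is demoted to 'migrate' below
        # when its dependencies do not land in the same phase.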
self.order_drop_add()
# Use mapping to create edges between operations
for op, depends in self.depends.items():
if op.removes:
mapping = self.nonexists
else:
mapping = self.exists
for key in depends:
refops = mapping.get(key, [])
for refop in refops:
# Dependency is reversed for drop operations
if op.removes:
self.add_edge(refop, op)
else:
self.add_edge(op, refop)
phases = {
'expand': [],
'migrate': [],
'contract': [],
}
# TODO(johannes): Schedule operations that are safe to run online
# depending on the capabilities of the database engine
for op in self.sort():
phase = op.desired_phase
if phase == 'expand':
depphases = set(o.phase for o in self.outbound[op])
depphases.discard(phase)
if depphases:
# Can't safely move this operation to expand because
# a dependency isn't in expand.
phase = 'migrate'
elif phase == 'contract':
# Since anything that depends on this hasn't had the
# phase determined yet, this has to be naive for now
if self.inbound[op]:
phase = 'migrate'
op.phase = phase
phases[op.phase].append(op)
return phases['expand'], phases['migrate'], phases['contract']
def _add_generated_tables_to_model(metadata, database='main'):
tables = dict(metadata.tables)
for table_name, table in tables.items():
if table_name.startswith('shadow_') or _ignore_table(table_name):
# Don't make a shadow of a shadow table or a table we
# explicitly ignore
continue
shadow_table_name = 'shadow_' + table_name
if shadow_table_name in tables:
# Shadow table already exists in model
continue
columns = [c.copy() for c in table.columns]
sqlalchemy.Table(shadow_table_name, metadata, *columns,
mysql_engine='InnoDB')
# Table is added to metadata as a side-effect of creating the object
repository = _find_migrate_repo(database)
if repository.version_table not in tables:
# The existing migrate_version table is expanded with a locked
# column so the 'db sync' command can be locked out between
# running 'db expand' and 'db contract'.
        # 'locked' would arguably be better modelled as a Boolean, but there
        # is no portable way of using server_default in that case. SQLAlchemy
# issue #1204
sqlalchemy.Table(repository.version_table, metadata,
sqlalchemy.Column('repository_id', sqlalchemy.String(250),
primary_key=True),
sqlalchemy.Column('repository_path', sqlalchemy.Text()),
sqlalchemy.Column('version', sqlalchemy.Integer()),
sqlalchemy.Column('locked', sqlalchemy.Integer(),
nullable=False, server_default='0'))
# Table is added to metadata as a side-effect of creating the object
def _schedule_schema_changes(context):
"""Split the list of diffs into expand, migrate and contract phases."""
metadata = models.BASE.metadata
_add_generated_tables_to_model(metadata)
# Take all of the diffs generated by Alembic and convert them into an
# easier to use format along with some dependency information.
diffs = alembic.autogenerate.compare_metadata(context, metadata)
converter = Converter()
ops = converter.convert_alembic(diffs)
scheduler = Scheduler(ops)
reflected_metadata = sqlalchemy.MetaData()
reflected_metadata.reflect(context.bind)
scheduler.handle_conflicts(reflected_metadata)
return scheduler.schedule()
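# Typical three-phase flow using the helpers below (a sketch; the CLI wiring
# that exposes these lives outside this module):
#   db_expand()    # online-safe additions; locks out plain 'db sync'
#   db_migrate()   # remaining changes, only allowed once expand is finished
#   db_contract()  # removals/cleanup; releases the lock and bumps the version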
def db_expand(dryrun=False, database='main'):
context = _create_migration_context(as_sql=False)
expand, migrate, contract = _schedule_schema_changes(context)
context = _create_migration_context(as_sql=dryrun)
ddlop = alembic.operations.Operations(context)
for op in expand:
op.execute(ddlop)
if not dryrun:
repository = _find_migrate_repo(database)
if not _set_db_sync_lock(repository, locked=True):
# No rows exist yet. Might be 'db sync' was never run
            db_version_control(INIT_VERSION[database], database)
_set_db_sync_lock(repository, locked=True)
def db_migrate(dryrun=False, database='main'):
context = _create_migration_context(as_sql=False)
expand, migrate, contract = _schedule_schema_changes(context)
if expand:
msg = _('expand phase still has operations that need to be executed')
raise exception.DatabaseMigrationError(reason=msg)
context = _create_migration_context(as_sql=dryrun)
ddlop = alembic.operations.Operations(context)
for op in migrate:
op.execute(ddlop)
def db_contract(dryrun=False, database='main'):
context = _create_migration_context(as_sql=False)
expand, migrate, contract = _schedule_schema_changes(context)
if expand:
msg = _('expand phase still has operations that need to be executed')
raise exception.DatabaseMigrationError(reason=msg)
if migrate:
msg = _('migrate phase still has operations that need to be executed')
raise exception.DatabaseMigrationError(reason=msg)
context = _create_migration_context(as_sql=dryrun)
ddlop = alembic.operations.Operations(context)
for op in contract:
op.execute(ddlop)
repository = _find_migrate_repo(database)
_set_db_sync_lock(repository, locked=False)
_set_db_sync_version(repository, repository.latest)
def _process_null_records(table, col_name, check_fkeys, delete=False):
"""Queries the database and optionally deletes the NULL records.
:param table: sqlalchemy.Table object.
:param col_name: The name of the column to check in the table.
    :param check_fkeys: If True, check the table for foreign keys back to the
        instances table; if none are found, skip the table and return 0.
:param delete: If true, run a delete operation on the table, else just
query for number of records that match the NULL column.
:returns: The number of records processed for the table and column.
"""
records = 0
if col_name in table.columns:
# NOTE(mriedem): filter out tables that don't have a foreign key back
# to the instances table since they could have stale data even if
# instances.uuid wasn't NULL.
if check_fkeys:
fkey_found = False
fkeys = table.c[col_name].foreign_keys or []
for fkey in fkeys:
if fkey.column.table.name == 'instances':
fkey_found = True
if not fkey_found:
return 0
if delete:
records = table.delete().where(
table.c[col_name] == null()
).execute().rowcount
else:
records = len(list(
table.select().where(table.c[col_name] == null()).execute()
))
return records
def db_null_instance_uuid_scan(delete=False):
"""Scans the database for NULL instance_uuid records.
:param delete: If true, delete NULL instance_uuid records found, else
just query to see if they exist for reporting.
:returns: dict of table name to number of hits for NULL instance_uuid rows.
"""
engine = get_engine()
meta = sqlalchemy.MetaData(bind=engine)
# NOTE(mriedem): We're going to load up all of the tables so we can find
# any with an instance_uuid column since those may be foreign keys back
# to the instances table and we want to cleanup those records first. We
# have to do this explicitly because the foreign keys in nova aren't
# defined with cascading deletes.
meta.reflect(engine)
# Keep track of all of the tables that had hits in the query.
processed = {}
for table in reversed(meta.sorted_tables):
# Ignore the fixed_ips table by design.
if table.name not in ('fixed_ips', 'shadow_fixed_ips'):
processed[table.name] = _process_null_records(
table, 'instance_uuid', check_fkeys=True, delete=delete)
# Now process the *instances tables.
for table_name in ('instances', 'shadow_instances'):
table = db_utils.get_table(engine, table_name)
processed[table.name] = _process_null_records(
table, 'uuid', check_fkeys=False, delete=delete)
return processed
def db_version_control(version=None, database='main'):
repository = _find_migrate_repo(database)
versioning_api.version_control(get_engine(database), repository, version)
return version
def _find_migrate_repo(database='main'):
"""Get the path for the migrate repository."""
global _REPOSITORY
rel_path = 'migrate_repo'
if database == 'api':
rel_path = os.path.join('api_migrations', 'migrate_repo')
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
rel_path)
assert os.path.exists(path)
if _REPOSITORY.get(database) is None:
_REPOSITORY[database] = Repository(path)
return _REPOSITORY[database]
| scripnichenko/nova | nova/db/sqlalchemy/migration.py | Python | apache-2.0 | 40,653 |
import os, sys
import imp
import time
from authorizenet import apicontractsv1
from authorizenet.apicontrollers import *
constants = imp.load_source('modulename', 'constants.py')
from decimal import *
def create_an_accept_payment_transaction(amount):
# Create a merchantAuthenticationType object with authentication details
# retrieved from the constants file
merchantAuth = apicontractsv1.merchantAuthenticationType()
merchantAuth.name = constants.apiLoginId
merchantAuth.transactionKey = constants.transactionKey
# Set the transaction's refId
refId = "ref {}".format(time.time())
# Create the payment object for a payment nonce
opaqueData = apicontractsv1.opaqueDataType()
opaqueData.dataDescriptor = "COMMON.ACCEPT.INAPP.PAYMENT"
opaqueData.dataValue = "119eyJjb2RlIjoiNTBfMl8wNjAwMDUyN0JEODE4RjQxOUEyRjhGQkIxMkY0MzdGQjAxQUIwRTY2NjhFNEFCN0VENzE4NTUwMjlGRUU0M0JFMENERUIwQzM2M0ExOUEwMDAzNzlGRDNFMjBCODJEMDFCQjkyNEJDIiwidG9rZW4iOiI5NDkwMjMyMTAyOTQwOTk5NDA0NjAzIiwidiI6IjEuMSJ9"
# Add the payment data to a paymentType object
paymentOne = apicontractsv1.paymentType()
paymentOne.opaqueData = opaqueData
# Create order information
order = apicontractsv1.orderType()
order.invoiceNumber = "10101"
order.description = "Golf Shirts"
# Set the customer's Bill To address
customerAddress = apicontractsv1.customerAddressType()
customerAddress.firstName = "Ellen"
customerAddress.lastName = "Johnson"
customerAddress.company = "Souveniropolis"
customerAddress.address = "14 Main Street"
customerAddress.city = "Pecan Springs"
customerAddress.state = "TX"
customerAddress.zip = "44628"
customerAddress.country = "USA"
# Set the customer's identifying information
customerData = apicontractsv1.customerDataType()
customerData.type = "individual"
customerData.id = "99999456654"
customerData.email = "[email protected]"
# Add values for transaction settings
duplicateWindowSetting = apicontractsv1.settingType()
duplicateWindowSetting.settingName = "duplicateWindow"
duplicateWindowSetting.settingValue = "600"
settings = apicontractsv1.ArrayOfSetting()
settings.setting.append(duplicateWindowSetting)
# Create a transactionRequestType object and add the previous objects to it
transactionrequest = apicontractsv1.transactionRequestType()
transactionrequest.transactionType = "authCaptureTransaction"
transactionrequest.amount = amount
transactionrequest.order = order
transactionrequest.payment = paymentOne
transactionrequest.billTo = customerAddress
transactionrequest.customer = customerData
transactionrequest.transactionSettings = settings
# Assemble the complete transaction request
createtransactionrequest = apicontractsv1.createTransactionRequest()
createtransactionrequest.merchantAuthentication = merchantAuth
createtransactionrequest.refId = refId
createtransactionrequest.transactionRequest = transactionrequest
# Create the controller and get response
createtransactioncontroller = createTransactionController(createtransactionrequest)
createtransactioncontroller.execute()
response = createtransactioncontroller.getresponse()
if response is not None:
# Check to see if the API request was successfully received and acted upon
if response.messages.resultCode == "Ok":
# Since the API request was successful, look for a transaction response
# and parse it to display the results of authorizing the card
if hasattr(response.transactionResponse, 'messages') == True:
print ('Successfully created transaction with Transaction ID: %s' % response.transactionResponse.transId)
print ('Transaction Response Code: %s' % response.transactionResponse.responseCode)
print ('Message Code: %s' % response.transactionResponse.messages.message[0].code)
print ('Auth Code: %s' % response.transactionResponse.authCode)
print ('Description: %s' % response.transactionResponse.messages.message[0].description)
else:
print ('Failed Transaction.')
if hasattr(response.transactionResponse, 'errors') == True:
print ('Error Code: %s' % str(response.transactionResponse.errors.error[0].errorCode))
print ('Error Message: %s' % response.transactionResponse.errors.error[0].errorText)
# Or, print errors if the API request wasn't successful
else:
print ('Failed Transaction.')
if hasattr(response, 'transactionResponse') == True and hasattr(response.transactionResponse, 'errors') == True:
print ('Error Code: %s' % str(response.transactionResponse.errors.error[0].errorCode))
print ('Error Message: %s' % response.transactionResponse.errors.error[0].errorText)
else:
print ('Error Code: %s' % response.messages.message[0]['code'].text)
print ('Error Message: %s' % response.messages.message[0]['text'].text)
else:
print ('Null Response.')
return response
if(os.path.basename(__file__) == os.path.basename(sys.argv[0])):
create_an_accept_payment_transaction(constants.amount)
| AuthorizeNet/sample-code-python | AcceptSuite/create-an-accept-payment-transaction.py | Python | mit | 5,389 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from beethoven import settings
urlpatterns = patterns(
'',
url(r'^$', 'accounts.views.index', name='index'),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('allauth.urls')),
url(r'^', include('courses.urls', namespace='courses')),
url(r'^', include('quizzes.urls', namespace='quizzes')),
url(r'^api/', include('quizzes.api_urls', namespace='quizzes_api'))
)
if settings.PRODUCTION:
urlpatterns += patterns(
'',
(r'^static/(?P<path>.*)$',
'django.views.static.serve',
{'document_root': settings.STATIC_ROOT})
)
| lockhawksp/beethoven | beethoven/urls.py | Python | mit | 683 |
import json
from django.test.client import Client
from django.utils.timezone import now, timedelta, localtime
from django.contrib.auth.models import User
from todo_lists.models import (
ToDoList,
ToDoContext,
ToDo,
)
class ToDoViewsTestCaseMixin(object):
def create_n_todos(self, n, user, todo_list):
self.todos = getattr(self, "todos", [])
for i in range(n):
todo = ToDo.objects.create(
owner=user,
name="User's {user} ToDo no.{no}".format(user=user, no=i+1),
notes="This is a ToDo that will serve for model tests. Its number is {no}".format(no=i+1),
due_time=now()+timedelta(days=i+1), # Saving times in UTC to db
todo_list=todo_list,
)
self.todos.append(todo)
def create_n_todo_lists(self, n, user):
self.todo_lists = getattr(self, "todo_lists", [])
for i in range(n):
todo_list = ToDoList.objects.create(
owner=user,
name="User's {user} ToDo List no.{no}".format(user=user, no=i+1)
)
self.todo_lists.append(todo_list)
def create_n_todo_contexts(self, n, user):
self.todo_contexts = getattr(self, "todo_contexts", [])
for i in range(n):
todo_context = ToDoContext.objects.create(
owner=user,
name="User's {user} ToDo Context no.{no}".format(user=user, no=i+1)
)
self.todo_contexts.append(todo_context)
def get_parsed_json_respone(self, response):
return json.loads(response.content)
def assertAjaxLoginRequired(self, url):
response = Client().get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
parsed_content = self.get_parsed_json_respone(response)
try:
self.assertEqual(parsed_content.get("message"), "Login required")
except AssertionError:
self.fail("The view didn't respond with 'login required' message to a request from anonymous user.")
try:
self.assertTrue("login_url" in parsed_content)
except AssertionError:
self.fail("The view did respond with 'login required' message to a request from anonymous user, but returned no login url.")
def setUp(self):
user1 = User(username="user1", first_name="User", last_name="One", email="[email protected]", is_active=True)
user1.set_password("user1")
user1.save()
self.user1 = user1
user2 = User(username="user2", first_name="User", last_name="Two", email="[email protected]", is_active=True)
user2.set_password("user2")
user2.save()
self.user2 = user2
self.http_client = Client()
self.http_client.login(username="user1", password="user1")
| pawartur/django-todo-lists | todo_lists/tests/mixins.py | Python | bsd-3-clause | 2,802 |
from mock import Mock
import pytest
import pytest_twisted
from twisted.internet import defer
from nav.ipdevpoll.jobs import SuggestedReschedule
from nav.ipdevpoll.plugins.snmpcheck import SnmpCheck
@pytest.fixture
def plugin():
netbox = Mock()
agent = Mock()
containers = dict()
return SnmpCheck(netbox, agent, containers)
@pytest.mark.twisted
@pytest_twisted.inlineCallbacks
def test_should_not_mark_as_up_when_already_up(plugin):
plugin._currently_down = Mock(return_value=False)
plugin._currently_down.__name__ = '_currently_down'
plugin.agent.walk.return_value = defer.succeed(True)
plugin._mark_as_up = Mock()
plugin._mark_as_down = Mock()
yield plugin.handle()
plugin._mark_as_up.assert_not_called()
plugin._mark_as_down.assert_not_called()
@pytest.mark.twisted
@pytest_twisted.inlineCallbacks
def test_should_keep_sending_down_events_when_down(plugin):
plugin._currently_down = Mock(return_value=True)
plugin._currently_down.__name__ = '_currently_down'
plugin.agent.walk.return_value = defer.succeed(False)
plugin._mark_as_up = Mock()
plugin._mark_as_down = Mock()
with pytest.raises(SuggestedReschedule):
yield plugin.handle()
plugin._mark_as_up.assert_not_called()
plugin._mark_as_down.assert_called()
@pytest.mark.twisted
@pytest_twisted.inlineCallbacks
def test_should_mark_as_down_when_transitioning_from_up_to_down(plugin):
plugin._currently_down = Mock(return_value=False)
plugin._currently_down.__name__ = '_currently_down'
plugin.agent.walk.return_value = defer.succeed(False)
plugin._mark_as_up = Mock()
plugin._mark_as_down = Mock()
with pytest.raises(SuggestedReschedule):
yield plugin.handle()
plugin._mark_as_up.assert_not_called()
plugin._mark_as_down.assert_called()
@pytest.mark.twisted
@pytest_twisted.inlineCallbacks
def test_should_mark_as_up_when_transitioning_from_down_to_up(plugin):
plugin._currently_down = Mock(return_value=True)
plugin._currently_down.__name__ = '_currently_down'
plugin.agent.walk.return_value = defer.succeed(True)
plugin._mark_as_up = Mock()
plugin._mark_as_down = Mock()
yield plugin.handle()
plugin._mark_as_down.assert_not_called()
plugin._mark_as_up.assert_called()
@pytest.mark.twisted
@pytest_twisted.inlineCallbacks
def test_do_check_should_report_false_on_timeout(plugin):
plugin.agent.walk.return_value = defer.fail(defer.TimeoutError())
res = yield plugin._do_check()
assert res is False
| hmpf/nav | tests/unittests/ipdevpoll/plugins_snmpcheck_test.py | Python | gpl-3.0 | 2,539 |
# @Author: Manuel Rodriguez <valle>
# @Date: 10-May-2017
# @Email: [email protected]
# @Filename: listview.py
# @Last modified by: valle
# @Last modified time: 01-Mar-2018
# @License: Apache license vesion 2.0
from kivy.uix.anchorlayout import AnchorLayout
from kivy.lang import Builder
from kivy.properties import StringProperty, ListProperty, NumericProperty
from kivy.utils import get_color_from_hex
Builder.load_string('''
#:import get_color kivy.utils.get_color_from_hex
<ValleListView>:
list: _listado
scroll: _scroll
spacing: 5
canvas.before:
Color:
rgb: get_color(root.bgColor)
Rectangle:
size: self.size
pos: self.pos
anchor_x: 'center'
anchor_y: 'center'
ScrollView:
id: _scroll
size_hint: 1, 1
GridLayout:
cols: 1
spacing: root.spacing
size_hint: 1, None
height: len(self.children) * dp(root.cheight)
id: _listado
''')
class ValleListView(AnchorLayout):
bgColor = StringProperty("#b0a18a")
cheight = NumericProperty("40dp")
def add_linea(self, widget):
self.list.add_widget(widget)
self.scroll.scroll_y = 0
def rm_linea(self, widget):
self.list.remove_widget(widget)
def rm_all_widgets(self):
self.list.clear_widgets()
def scroll_up(self, up=1):
self.scroll.scroll_y = up
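# Usage sketch (assumed, not part of the original widget): rows are ordinary
# widgets appended through the methods above, e.g.
#   lv = ValleListView(cheight="48dp")
#   lv.add_linea(Button(text="fila 1"))
#   lv.rm_all_widgets()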
| vallemrv/tpvB3 | tpv_for_eetop/valle_libs/components/listview.py | Python | apache-2.0 | 1,446 |
'''
Copyright (c) <2012> Tarek Galal <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy of this
software and associated documentation files (the "Software"), to deal in the Software
without restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR
A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from ConnectionIO.protocoltreenode import ProtocolTreeNode
from ConnectionIO.ioexceptions import ConnectionClosedException
from ConnectionIO.connectionengine import ConnectionEngine
from Common.debugger import Debugger
import threading, select, time
from Common.watime import WATime
from Auth.auth import YowsupAuth
from Common.constants import Constants
from Interfaces.Lib.LibInterface import LibMethodInterface, LibSignalInterface
import thread
from random import randrange
import socket
import hashlib
import base64
import sys
import traceback
class YowsupConnectionManager:
def __init__(self):
Debugger.attach(self)
self.currKeyId = 1
self.iqId = 0
self.verbose = True
self.state = 0
self.lock = threading.Lock()
self.autoPong = True
self.domain = "s.whatsapp.net"
#self.methodInterface = MethodInterface(authenticatedSocketConnection.getId())
#self.signalInterface = SignalInterface(authenticatedSocketConnection.getId())
self.readerThread = None
self.methodInterface = LibMethodInterface()
self.signalInterface = LibSignalInterface()
self.readerThread = ReaderThread()
self.readerThread.setSignalInterface(self.signalInterface)
self.bindMethods()
def setInterfaces(self, signalInterface, methodInterface):
self.methodInterface = methodInterface
self.signalInterface = signalInterface
self.readerThread.setSignalInterface(self.signalInterface)
self.bindMethods()
def getSignalsInterface(self):
return self.signalInterface
def getMethodsInterface(self):
return self.methodInterface
def setAutoPong(self, autoPong):
self.autoPong = self.readerThread.autoPong = autoPong
def startReader(self):
if self.readerThread.isAlive():
self._d("Reader already started")
return 0
self._d("starting reader")
try:
self.readerThread.start()
self._d("started")
except RuntimeError:
self._d("Reader already started before")
self.readerThread.sendDisconnected()
return 0
return 1
def block(self):
self.readerThread.join()
def bindMethods(self):
self.methodInterface.registerCallback("getVersion", lambda: Constants.v)
self.methodInterface.registerCallback("message_send",self.sendText)
self.methodInterface.registerCallback("message_imageSend",self.sendImage)
self.methodInterface.registerCallback("message_audioSend",self.sendAudio)
self.methodInterface.registerCallback("message_videoSend",self.sendVideo)
self.methodInterface.registerCallback("message_locationSend",self.sendLocation)
self.methodInterface.registerCallback("message_vcardSend",self.sendVCard)
self.methodInterface.registerCallback("message_ack",self.sendMessageReceipt)
self.methodInterface.registerCallback("notification_ack", self.sendNotificationReceipt)
self.methodInterface.registerCallback("clientconfig_send",self.sendClientConfig)
self.methodInterface.registerCallback("delivered_ack",self.sendDeliveredReceiptAck)
self.methodInterface.registerCallback("visible_ack",self.sendVisibleReceiptAck)
self.methodInterface.registerCallback("ping",self.sendPing)
self.methodInterface.registerCallback("pong",self.sendPong)
self.methodInterface.registerCallback("typing_send",self.sendTyping)
self.methodInterface.registerCallback("typing_paused",self.sendPaused)
self.methodInterface.registerCallback("subject_ack",self.sendSubjectReceived)
self.methodInterface.registerCallback("group_getGroups", self.sendGetGroups)
self.methodInterface.registerCallback("group_getInfo",self.sendGetGroupInfo)
self.methodInterface.registerCallback("group_create",self.sendCreateGroupChat)
self.methodInterface.registerCallback("group_addParticipants",self.sendAddParticipants)
self.methodInterface.registerCallback("group_removeParticipants",self.sendRemoveParticipants)
self.methodInterface.registerCallback("group_end",self.sendEndGroupChat)
self.methodInterface.registerCallback("group_setSubject",self.sendSetGroupSubject)
self.methodInterface.registerCallback("group_setPicture", self.sendSetPicture)
self.methodInterface.registerCallback("group_getPicture", self.sendGetPicture)
self.methodInterface.registerCallback("group_getParticipants",self.sendGetParticipants)
self.methodInterface.registerCallback("picture_get",self.sendGetPicture)
self.methodInterface.registerCallback("picture_getIds",self.sendGetPictureIds)
self.methodInterface.registerCallback("contact_getProfilePicture", self.sendGetPicture)
self.methodInterface.registerCallback("status_update",self.sendChangeStatus)
self.methodInterface.registerCallback("presence_request",self.getLastOnline)
#self.methodInterface.registerCallback("presence_unsubscribe",self.sendUnsubscribe)#@@TODO implement method
self.methodInterface.registerCallback("presence_subscribe",self.sendSubscribe)
self.methodInterface.registerCallback("presence_sendAvailableForChat",self.sendAvailableForChat)
self.methodInterface.registerCallback("presence_sendAvailable",self.sendAvailable)
self.methodInterface.registerCallback("presence_sendUnavailable",self.sendUnavailable)
self.methodInterface.registerCallback("profile_setPicture", self.sendSetProfilePicture)
self.methodInterface.registerCallback("profile_getPicture", self.sendGetProfilePicture)
self.methodInterface.registerCallback("profile_setStatus", self.sendChangeStatus)
self.methodInterface.registerCallback("disconnect", self.disconnect)
self.methodInterface.registerCallback("ready", self.startReader)
self.methodInterface.registerCallback("auth_login", self.auth )
#self.methodInterface.registerCallback("auth_login", self.auth)
def disconnect(self, reason=""):
self._d("Disconnect sequence initiated")
self._d("Sending term signal to reader thread")
if self.readerThread.isAlive():
self.readerThread.terminate()
self._d("Shutting down socket")
self.socket.close()
self._d("Waiting for readerThread to die")
self.readerThread.join()
self._d("Disconnected!")
self._d(reason)
self.state = 0
self.readerThread.sendDisconnected(reason)
def getConnection(self):
return self.socket
def triggerEvent(self, eventName, stanza):
if self.events.has_key(eventName) and self.events[eventName] is not None:
self.events[eventName](stanza)
def bindEvent(self, eventName, callback):
if self.events.has_key(eventName):
self.events[eventName] = callback
##########################################################
def _writeNode(self, node):
if self.state == 2:
try:
self.out.write(node)
return True
except ConnectionClosedException:
self._d("CONNECTION DOWN")
#self.disconnect("closed")
if self.readerThread.isAlive():
self.readerThread.terminate()
self.readerThread.join()
self.readerThread.sendDisconnected("closed")
return False
def onDisconnected(self):
self._d("Setting state to 0")
self.state = 0
def auth(self, username, password):
self._d(">>>>>>>> AUTH CALLED")
username = str(username)
password = str(password)
#traceback.print_stack()
self.lock.acquire()
if self.state == 0 :
if self.readerThread.isAlive():
raise Exception("TWO READER THREADS ON BOARD!!")
self.readerThread = ReaderThread()
self.readerThread.autoPong = self.autoPong
self.readerThread.setSignalInterface(self.signalInterface)
yAuth = YowsupAuth(ConnectionEngine())
try:
self.state = 1
connection = yAuth.authenticate(username, password, Constants.domain, Constants.resource)
except socket.gaierror:
self._d("DNS ERROR")
self.readerThread.sendDisconnected("dns")
#self.signalInterface.send("disconnected", ("dns",))
self.lock.release()
self.state = 0
return 0
except socket.error:
self._d("Socket error, connection timed out")
self.readerThread.sendDisconnected("closed")
#self.signalInterface.send("disconnected", ("closed",))
self.lock.release()
self.state = 0
return 0
except ConnectionClosedException:
self._d("Conn closed Exception")
self.readerThread.sendDisconnected("closed")
#self.signalInterface.send("disconnected", ("closed",))
self.lock.release()
self.state = 0
return 0
if not connection:
self.state = 0
self.signalInterface.send("auth_fail", (username, "invalid"))
self.lock.release()
return 0
self.state = 2
self.socket = connection
self.jid = self.socket.jid
#@@TODO REPLACE PROPERLY
self.out = self.socket.writer
self.readerThread.setSocket(self.socket)
self.readerThread.disconnectedCallback = self.onDisconnected
self.readerThread.onPing = self.sendPong
self.readerThread.ping = self.sendPing
self.signalInterface.send("auth_success", (username,))
self.lock.release()
def sendTyping(self,jid):
self._d("SEND TYPING TO JID")
composing = ProtocolTreeNode("composing",{"xmlns":"http://jabber.org/protocol/chatstates"})
message = ProtocolTreeNode("message",{"to":jid,"type":"chat"},[composing]);
self._writeNode(message);
def sendPaused(self,jid):
self._d("SEND PAUSED TO JID")
composing = ProtocolTreeNode("paused",{"xmlns":"http://jabber.org/protocol/chatstates"})
message = ProtocolTreeNode("message",{"to":jid,"type":"chat"},[composing]);
self._writeNode(message);
def getSubjectMessage(self,to,msg_id,child):
messageNode = ProtocolTreeNode("message",{"to":to,"type":"subject","id":msg_id},[child]);
return messageNode
def sendSubjectReceived(self,to,msg_id):
self._d("Sending subject recv receipt")
receivedNode = ProtocolTreeNode("received",{"xmlns": "urn:xmpp:receipts"});
messageNode = self.getSubjectMessage(to,msg_id,receivedNode);
self._writeNode(messageNode);
def sendMessageReceipt(self, jid, msgId):
self.sendReceipt(jid, "chat", msgId)
def sendNotificationReceipt(self, jid, notificationId):
self.sendReceipt(jid, "notification", notificationId)
def sendReceipt(self,jid,mtype,mid):
self._d("sending message received to "+jid+" - type:"+mtype+" - id:"+mid)
receivedNode = ProtocolTreeNode("received",{"xmlns": "urn:xmpp:receipts"})
messageNode = ProtocolTreeNode("message",{"to":jid,"type":mtype,"id":mid},[receivedNode]);
self._writeNode(messageNode);
def sendDeliveredReceiptAck(self,to,msg_id):
self._writeNode(self.getReceiptAck(to,msg_id,"delivered"));
def sendVisibleReceiptAck(self,to,msg_id):
self._writeNode(self.getReceiptAck(to,msg_id,"visible"));
def getReceiptAck(self,to,msg_id,receiptType):
ackNode = ProtocolTreeNode("ack",{"xmlns":"urn:xmpp:receipts","type":receiptType})
messageNode = ProtocolTreeNode("message",{"to":to,"type":"chat","id":msg_id},[ackNode]);
return messageNode;
def makeId(self,prefix):
self.iqId += 1
idx = ""
if self.verbose:
idx += prefix + str(self.iqId);
else:
idx = "%x" % self.iqId
return idx
def sendPing(self):
idx = self.makeId("ping_")
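        # Responses are correlated by iq id: the reader thread looks up the id
        # of an incoming result stanza in its 'requests' map and dispatches it
        # to the parser registered here (parsePingResponse).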
self.readerThread.requests[idx] = self.readerThread.parsePingResponse;
pingNode = ProtocolTreeNode("ping",{"xmlns":"w:p"});
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get","to":self.domain},[pingNode]);
self._writeNode(iqNode);
return idx
def sendPong(self,idx):
iqNode = ProtocolTreeNode("iq",{"type":"result","to":self.domain,"id":idx})
self._writeNode(iqNode);
def getLastOnline(self,jid):
if len(jid.split('-')) == 2 or jid == "[email protected]": #SUPER CANCEL SUBSCRIBE TO GROUP AND SERVER
return
self.sendSubscribe(jid);
self._d("presence request Initiated for %s"%(jid))
idx = self.makeId("last_")
self.readerThread.requests[idx] = self.readerThread.parseLastOnline;
query = ProtocolTreeNode("query",{"xmlns":"jabber:iq:last"});
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get","to":jid},[query]);
self._writeNode(iqNode)
def sendIq(self):
node = ProtocolTreeNode("iq",{"to":"g.us","type":"get","id":str(int(time.time()))+"-0"},None,'expired');
self._writeNode(node);
node = ProtocolTreeNode("iq",{"to":"s.whatsapp.net","type":"set","id":str(int(time.time()))+"-1"},None,'expired');
self._writeNode(node);
def sendAvailableForChat(self, pushname):
presenceNode = ProtocolTreeNode("presence",{"name":pushname})
self._writeNode(presenceNode);
def sendAvailable(self):
presenceNode = ProtocolTreeNode("presence",{"type":"available"})
self._writeNode(presenceNode);
def sendUnavailable(self):
presenceNode = ProtocolTreeNode("presence",{"type":"unavailable"})
self._writeNode(presenceNode);
def sendSubscribe(self,to):
presenceNode = ProtocolTreeNode("presence",{"type":"subscribe","to":to});
self._writeNode(presenceNode);
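    # Decorator: the wrapped send* method only returns the media type string;
    # this builds the <media> node (url, file name, size, optional preview payload
    # taken from the trailing argument) for the sendMessage decorator to wrap.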
def mediaNode(fn):
def wrapped(self, *args):
mediaType = fn(self, *args)
url = args[1]
name = args[2]
size = args[3]
mmNode = ProtocolTreeNode("media", {"xmlns":"urn:xmpp:whatsapp:mms","type":mediaType,"file":name,"size":size,"url":url},None, args[4:][0] if args[4:] else None);
return mmNode
return wrapped
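    # Decorator: wraps the node returned by the decorated method into a full
    # message stanza addressed to args[0], writes it to the socket and returns
    # the generated message id.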
def sendMessage(fn):
def wrapped(self, *args):
node = fn(self, *args)
jid = args[0]
messageNode = self.getMessageNode(jid, node)
self._writeNode(messageNode);
return messageNode.getAttributeValue("id")
return wrapped
def sendChangeStatus(self,status):
self._d("updating status to: %s"%(status))
bodyNode = ProtocolTreeNode("body",None,None,status);
messageNode = self.getMessageNode("s.us",bodyNode)
self._writeNode(messageNode);
return messageNode.getAttributeValue("id")
@sendMessage
def sendText(self,jid, content):
return ProtocolTreeNode("body",None,None,content);
@sendMessage
@mediaNode
def sendImage(self, jid, url, name, size, preview):
return "image"
@sendMessage
@mediaNode
def sendVideo(self, jid, url, name, size, preview):
return "video"
@sendMessage
@mediaNode
def sendAudio(self, jid, url, name, size):
return "audio"
@sendMessage
def sendLocation(self, jid, latitude, longitude, preview):
self._d("sending location (" + latitude + ":" + longitude + ")")
return ProtocolTreeNode("media", {"xmlns":"urn:xmpp:whatsapp:mms","type":"location","latitude":latitude,"longitude":longitude},None,preview)
@sendMessage
def sendVCard(self, jid, data, name):
cardNode = ProtocolTreeNode("vcard",{"name":name},None,data);
return ProtocolTreeNode("media", {"xmlns":"urn:xmpp:whatsapp:mms","type":"vcard"},[cardNode])
def sendClientConfig(self,sound,pushID,preview,platform):
idx = self.makeId("config_");
configNode = ProtocolTreeNode("config",{"xmlns":"urn:xmpp:whatsapp:push","sound":sound,"id":pushID,"preview":"1" if preview else "0","platform":platform})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":self.domain},[configNode]);
self._writeNode(iqNode);
def sendGetGroups(self,gtype):
self._d("getting groups %s"%(gtype))
idx = self.makeId("get_groups_")
self.readerThread.requests[idx] = self.readerThread.parseGroups;
queryNode = ProtocolTreeNode("list",{"xmlns":"w:g","type":gtype})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get","to":"g.us"},[queryNode])
self._writeNode(iqNode)
def sendGetGroupInfo(self,jid):
self._d("getting group info for %s"%(jid))
idx = self.makeId("get_g_info_")
self.readerThread.requests[idx] = self.readerThread.parseGroupInfo;
queryNode = ProtocolTreeNode("query",{"xmlns":"w:g"})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get","to":jid},[queryNode])
self._writeNode(iqNode)
def sendCreateGroupChat(self,subject):
self._d("creating group: %s"%(subject))
idx = self.makeId("create_group_")
self.readerThread.requests[idx] = self.readerThread.parseGroupCreated;
queryNode = ProtocolTreeNode("group",{"xmlns":"w:g","action":"create","subject":subject})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":"g.us"},[queryNode])
self._writeNode(iqNode)
def sendAddParticipants(self,gjid,participants):
self._d("opening group: %s"%(gjid))
self._d("adding participants: %s"%(participants))
idx = self.makeId("add_group_participants_")
self.readerThread.requests[idx] = self.readerThread.parseAddedParticipants;
parts = participants.split(',')
innerNodeChildren = []
i = 0;
for part in parts:
if part != "undefined":
innerNodeChildren.append( ProtocolTreeNode("participant",{"jid":part}) )
i = i + 1;
queryNode = ProtocolTreeNode("add",{"xmlns":"w:g"},innerNodeChildren)
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":gjid},[queryNode])
self._writeNode(iqNode)
def sendRemoveParticipants(self,gjid,participants):
self._d("opening group: %s"%(gjid))
self._d("removing participants: %s"%(participants))
idx = self.makeId("remove_group_participants_")
self.readerThread.requests[idx] = self.readerThread.parseRemovedParticipants;
parts = participants.split(',')
innerNodeChildren = []
i = 0;
for part in parts:
if part != "undefined":
innerNodeChildren.append( ProtocolTreeNode("participant",{"jid":part}) )
i = i + 1;
queryNode = ProtocolTreeNode("remove",{"xmlns":"w:g"},innerNodeChildren)
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":gjid},[queryNode])
self._writeNode(iqNode)
def sendEndGroupChat(self,gjid):
self._d("removing group: %s"%(gjid))
idx = self.makeId("leave_group_")
self.readerThread.requests[idx] = self.readerThread.parseGroupEnded;
innerNodeChildren = []
innerNodeChildren.append( ProtocolTreeNode("group",{"id":gjid}) )
queryNode = ProtocolTreeNode("leave",{"xmlns":"w:g"},innerNodeChildren)
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":"g.us"},[queryNode])
self._writeNode(iqNode)
def sendSetGroupSubject(self,gjid,subject):
#subject = subject.encode('utf-8')
#self._d("setting group subject of " + gjid + " to " + subject)
idx = self.makeId("set_group_subject_")
self.readerThread.requests[idx] = self.readerThread.parseGroupSubject
queryNode = ProtocolTreeNode("subject",{"xmlns":"w:g","value":subject})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"set","to":gjid},[queryNode]);
self._writeNode(iqNode)
def sendGetParticipants(self,jid):
idx = self.makeId("get_participants_")
self.readerThread.requests[idx] = self.readerThread.parseParticipants
listNode = ProtocolTreeNode("list",{"xmlns":"w:g"})
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get","to":jid},[listNode]);
self._writeNode(iqNode)
def sendGetPicture(self,jid):
self._d("GETTING PICTURE FROM " + jid)
idx = self.makeId("get_picture_")
#@@TODO, ?!
self.readerThread.requests[idx] = self.readerThread.parseGetPicture
listNode = ProtocolTreeNode("picture",{"xmlns":"w:profile:picture","type":"image"})
iqNode = ProtocolTreeNode("iq",{"id":idx,"to":jid,"type":"get"},[listNode]);
self._writeNode(iqNode)
def sendGetPictureIds(self,jids):
idx = self.makeId("get_picture_ids_")
self.readerThread.requests[idx] = self.readerThread.parseGetPictureIds
parts = jids.split(',')
innerNodeChildren = []
i = 0;
for part in parts:
if part != "undefined":
innerNodeChildren.append( ProtocolTreeNode("user",{"jid":part}) )
i = i + 1;
queryNode = ProtocolTreeNode("list",{"xmlns":"w:profile:picture"},innerNodeChildren)
iqNode = ProtocolTreeNode("iq",{"id":idx,"type":"get"},[queryNode])
self._writeNode(iqNode)
def sendGetProfilePicture(self):
return self.sendGetPicture(self.jid)
def sendSetProfilePicture(self, filepath):
return self.sendSetPicture(self.jid, filepath)
def sendSetPicture(self, jid, imagePath):
        f = open(imagePath, 'rb')  # binary mode so the raw image bytes are read unmodified
imageData = f.read()
imageData = bytearray(imageData)
f.close()
idx = self.makeId("set_picture_")
self.readerThread.requests[idx] = self.readerThread.parseSetPicture
listNode = ProtocolTreeNode("picture",{"xmlns":"w:profile:picture","type":"image"}, None, imageData)
iqNode = ProtocolTreeNode("iq",{"id":idx,"to":jid,"type":"set"},[listNode])
self._writeNode(iqNode)
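    # Builds the outgoing <message> stanza: a jabber:x:event child (server receipt
    # request) plus the payload node, with an id of "<timestamp>-<counter>".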
def getMessageNode(self, jid, child):
requestNode = None;
serverNode = ProtocolTreeNode("server",None);
xNode = ProtocolTreeNode("x",{"xmlns":"jabber:x:event"},[serverNode]);
childCount = (0 if requestNode is None else 1) +2;
messageChildren = [None]*childCount;
i = 0;
if requestNode is not None:
messageChildren[i] = requestNode;
i+=1;
#System.currentTimeMillis() / 1000L + "-"+1
messageChildren[i] = xNode;
i+=1;
messageChildren[i]= child;
i+=1;
msgId = str(int(time.time()))+"-"+ str(self.currKeyId)
messageNode = ProtocolTreeNode("message",{"to":jid,"type":"chat","id":msgId},messageChildren)
self.currKeyId += 1
return messageNode;
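# Daemon thread that reads protocol trees from the socket, answers pings,
# resolves pending iq requests by their id and re-emits everything else as
# signals on the shared signal interface.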
class ReaderThread(threading.Thread):
def __init__(self):
Debugger.attach(self);
self.signalInterface = None
#self.socket = connection
self.terminateRequested = False
self.disconnectedSent = False
self.timeout = 240
self.selectTimeout = 3
self.requests = {};
self.lock = threading.Lock()
self.disconnectedCallback = None
self.autoPong = True
self.onPing = self.ping = None
self.lastPongTime = int(time.time())
super(ReaderThread,self).__init__();
self.daemon = True
def setSocket(self, connection):
self.socket = connection
def setSignalInterface(self, signalInterface):
self.signalInterface = signalInterface
def terminate(self):
self._d("attempting to exit gracefully")
self.terminateRequested = True
def sendDisconnected(self, reason="noreason"):
self._d("Sending disconnected because of %s" % reason)
self.lock.acquire()
if not self.disconnectedSent:
self.disconnectedSent = True
if self.disconnectedCallback:
self.disconnectedCallback()
self.lock.release()
self.signalInterface.send("disconnected", (reason,))
def run(self):
self._d("Read thread startedX");
while True:
countdown = self.timeout - ((int(time.time()) - self.lastPongTime))
remainder = countdown % self.selectTimeout
countdown = countdown - remainder
if countdown <= 0:
self._d("No hope, dying!")
self.sendDisconnected("closed")
return
else:
if countdown % (self.selectTimeout*10) == 0 or countdown < 11:
self._d("Waiting, time to die: T-%i seconds" % countdown )
if self.timeout-countdown == 210 and self.ping and self.autoPong:
self.ping()
self.selectTimeout = 1 if countdown < 11 else 3
try:
ready = select.select([self.socket.reader.rawIn], [], [], self.selectTimeout)
except:
self._d("Error in ready")
raise
if self.terminateRequested:
return
if ready[0]:
try:
node = self.socket.reader.nextTree()
except ConnectionClosedException:
#print traceback.format_exc()
self._d("Socket closed, got 0 bytes!")
#self.signalInterface.send("disconnected", ("closed",))
self.sendDisconnected("closed")
return
self.lastPongTime = int(time.time());
if node is not None:
if ProtocolTreeNode.tagEquals(node,"iq"):
iqType = node.getAttributeValue("type")
idx = node.getAttributeValue("id")
if iqType is None:
raise Exception("iq doesn't have type")
if iqType == "result":
if self.requests.has_key(idx):
self.requests[idx](node)
del self.requests[idx]
elif idx.startswith(self.connection.user):
accountNode = node.getChild(0)
ProtocolTreeNode.require(accountNode,"account")
kind = accountNode.getAttributeValue("kind")
if kind == "paid":
self.connection.account_kind = 1
elif kind == "free":
self.connection.account_kind = 0
else:
self.connection.account_kind = -1
expiration = accountNode.getAttributeValue("expiration")
if expiration is None:
raise Exception("no expiration")
try:
self.connection.expire_date = long(expiration)
except ValueError:
raise IOError("invalid expire date %s"%(expiration))
self.eventHandler.onAccountChanged(self.connection.account_kind,self.connection.expire_date)
elif iqType == "error":
if self.requests.has_key(idx):
self.requests[idx](node)
del self.requests[idx]
elif iqType == "get":
childNode = node.getChild(0)
if ProtocolTreeNode.tagEquals(childNode,"ping"):
if self.autoPong:
self.onPing(idx)
self.signalInterface.send("ping", (idx,))
elif ProtocolTreeNode.tagEquals(childNode,"query") and node.getAttributeValue("from") is not None and "http://jabber.org/protocol/disco#info" == childNode.getAttributeValue("xmlns"):
pin = childNode.getAttributeValue("pin");
timeoutString = childNode.getAttributeValue("timeout");
try:
timeoutSeconds = int(timeoutString) if timeoutString is not None else None
except ValueError:
raise Exception("relay-iq exception parsing timeout %s "%(timeoutString))
if pin is not None:
self.eventHandler.onRelayRequest(pin,timeoutSeconds,idx)
elif iqType == "set":
childNode = node.getChild(0)
if ProtocolTreeNode.tagEquals(childNode,"query"):
xmlns = childNode.getAttributeValue("xmlns")
if xmlns == "jabber:iq:roster":
itemNodes = childNode.getAllChildren("item");
ask = ""
for itemNode in itemNodes:
jid = itemNode.getAttributeValue("jid")
subscription = itemNode.getAttributeValue("subscription")
ask = itemNode.getAttributeValue("ask")
else:
raise Exception("Unkown iq type %s"%(iqType))
elif ProtocolTreeNode.tagEquals(node,"presence"):
xmlns = node.getAttributeValue("xmlns")
jid = node.getAttributeValue("from")
if (xmlns is None or xmlns == "urn:xmpp") and jid is not None:
presenceType = node.getAttributeValue("type")
if presenceType == "unavailable":
self.signalInterface.send("presence_unavailable", (jid,))
elif presenceType is None or presenceType == "available":
self.signalInterface.send("presence_available", (jid,))
elif xmlns == "w" and jid is not None:
status = node.getAttributeValue("status")
if status == "dirty":
#categories = self.parseCategories(node); #@@TODO, send along with signal
self._d("WILL SEND DIRTY")
self.signalInterface.send("status_dirty")
self._d("SENT DIRTY")
elif ProtocolTreeNode.tagEquals(node,"message"):
self.parseMessage(node)
self._d("Reader thread terminating now!")
def parseOfflineMessageStamp(self,stamp):
watime = WATime();
parsed = watime.parseIso(stamp)
local = watime.utcToLocal(parsed)
stamp = watime.datetimeToTimestamp(local)
return stamp
def parsePingResponse(self, node):
idx = node.getAttributeValue("id")
self.lastPongTime = int(time.time())
def parseLastOnline(self,node):
jid = node.getAttributeValue("from");
firstChild = node.getChild(0);
if "error" in firstChild.toString():
return
ProtocolTreeNode.require(firstChild,"query");
seconds = firstChild.getAttributeValue("seconds");
status = None
status = firstChild.data #@@TODO discarded?
try:
if seconds is not None and jid is not None:
self.signalInterface.send("presence_updated", (jid, int(seconds)))
except:
self._d("Ignored exception in handleLastOnline "+ sys.exc_info()[1])
def parseGroups(self,node):
children = node.getAllChildren("group");
groups = []
for groupNode in children:
gJid = groupNode.getAttributeValue("id") + "@g.us"
ownerJid = groupNode.getAttributeValue("owner")
subject = groupNode.getAttributeValue("subject")
subjectOwnerJid = groupNode.getAttributeValue("s_o")
subjectT = groupNode.getAttributeValue("s_t")
creation = groupNode.getAttributeValue("creation")
groups.append({"gJid":gJid, "ownerJid":ownerJid, "subject":subject, "subjectOwnerJid":subjectOwnerJid, "subjectT":subjectT, "creation":creation})
self.signalInterface.send("group_gotGroups", (groups,))
def parseGroupInfo(self,node):
jid = node.getAttributeValue("from");
groupNode = node.getChild(0)
if "error code" in groupNode.toString():
self.signalInterface.send("group_infoError",(0,)) #@@TODO replace with real error code
else:
ProtocolTreeNode.require(groupNode,"group")
#gid = groupNode.getAttributeValue("id")
owner = groupNode.getAttributeValue("owner")
subject = groupNode.getAttributeValue("subject")
subjectT = groupNode.getAttributeValue("s_t")
subjectOwner = groupNode.getAttributeValue("s_o")
creation = groupNode.getAttributeValue("creation")
self.signalInterface.send("group_gotInfo",(jid, owner, subject, subjectOwner, int(subjectT),int(creation)))
def parseAddedParticipants(self, node):
jid = node.getAttributeValue("from");
self.signalInterface.send("group_addParticipantsSuccess", (jid,))
def parseRemovedParticipants(self,node): #fromm, successVector=None,failTable=None
jid = node.getAttributeValue("from");
self._d("handleRemovedParticipants DONE!");
self.signalInterface.send("group_removeParticipantsSuccess", (jid,))
def parseGroupCreated(self,node):
jid = node.getAttributeValue("from");
groupNode = node.getChild(0)
if ProtocolTreeNode.tagEquals(groupNode,"error"):
errorCode = groupNode.getAttributeValue("code")
self.signalInterface.send("group_createFail", (errorCode,))
return
ProtocolTreeNode.require(groupNode,"group")
group_id = groupNode.getAttributeValue("id")
self.signalInterface.send("group_createSuccess", (jid, group_id))
def parseGroupEnded(self,node):
jid = node.getAttributeValue("from");
self.signalInterface.send("group_endSuccess", (jid,))
def parseGroupSubject(self,node):
jid = node.getAttributeValue("from");
self.signalInterface.send("group_setSubjectSuccess", (jid,))
def parseParticipants(self,node):
jid = node.getAttributeValue("from");
children = node.getAllChildren("participant");
jids = []
for c in children:
jids.append(c.getAttributeValue("jid"))
self.signalInterface.send("group_gotParticipants", (jid, jids))
#@@TODO PICTURE STUFF
def createTmpFile(self, identifier ,data):
tmpDir = "/tmp"
filename = "%s/wazapp_%i_%s" % (tmpDir, randrange(0,100000) , hashlib.md5(identifier).hexdigest())
tmpfile = open(filename, "w")
tmpfile.write(data)
tmpfile.close()
return filename
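    # The picture bytes arrive inside the <picture> element; the raw payload is
    # recovered from the node's string form and written to a temporary file whose
    # path is forwarded with the signal.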
def parseGetPicture(self,node):
jid = node.getAttributeValue("from");
if "error code" in node.toString():
return;
data = node.getChild("picture").toString()
if data is not None:
n = data.find(">") +2
data = data[n:]
data = data.replace("</picture>","")
tmp = self.createTmpFile("picture_%s" % jid, data)
try:
jid.index('-')
self.signalInterface.send("group_gotPicture", (jid, tmp))
except ValueError:
self.signalInterface.send("contact_gotProfilePicture", (jid, tmp))
def parseGetPictureIds(self,node):
jid = node.getAttributeValue("from");
groupNode = node.getChild("list")
#self._d(groupNode.toString())
children = groupNode.getAllChildren("user");
#pids = []
for c in children:
if c.getAttributeValue("id") is not None:
#pids.append({"jid":c.getAttributeValue("jid"),"id":c.getAttributeValue("id")})
self.signalInterface.send("contact_gotProfilePictureId", (c.getAttributeValue("jid"), c.getAttributeValue("id")))
#self.signalInterface.send("contact_gotProfilePictureIds", (pids,))
def parseSetPicture(self,node):
jid = node.getAttributeValue("from");
picNode = node.getChild("picture")
try:
jid.index('-')
if picNode is None:
self.signalInterface.send("group_setPictureError", (jid,0)) #@@TODO SEND correct error code
else:
self.signalInterface.send("group_setPictureSuccess", (jid,))
except ValueError:
if picNode is None:
self.signalInterface.send("profile_setPictureError", (0,)) #@@TODO SEND correct error code
else:
self.signalInterface.send("profile_setPictureSuccess")
def parseMessage(self,messageNode):
bodyNode = messageNode.getChild("body");
# offlineNode = messageNode.getChild("offline")
newSubject = "" if bodyNode is None else bodyNode.data;
msgData = None
# timestamp =long(time.time()*1000) if not offlineNode else int(messageNode.getAttributeValue("t"))*1000;
timestamp =int(messageNode.getAttributeValue("t"))
isGroup = False
if newSubject.find("New version of WhatsApp Messenger is now available")>-1:
self._d("Rejecting whatsapp server message")
return #REJECT THIS FUCKING MESSAGE!
fromAttribute = messageNode.getAttributeValue("from");
try:
fromAttribute.index('-')
isGroup = True
except:
pass
author = messageNode.getAttributeValue("author");
#@@TODO reactivate blocked contacts check from client
'''if fromAttribute is not None and fromAttribute in self.eventHandler.blockedContacts:
self._d("CONTACT BLOCKED!")
return
if author is not None and author in self.eventHandler.blockedContacts:
self._d("CONTACT BLOCKED!")
return
'''
pushName = None
notifNode = messageNode.getChild("notify")
if notifNode is not None:
pushName = notifNode.getAttributeValue("name");
pushName = pushName.decode("utf8")
msgId = messageNode.getAttributeValue("id");
attribute_t = messageNode.getAttributeValue("t");
typeAttribute = messageNode.getAttributeValue("type");
if typeAttribute == "error":
errorCode = 0;
errorNodes = messageNode.getAllChildren("error");
for errorNode in errorNodes:
codeString = errorNode.getAttributeValue("code")
try:
errorCode = int(codeString);
except ValueError:
'''catch value error'''
self.signalInterface.send("message_error", (msgId, fromAttribute, errorCode))
elif typeAttribute == "notification":
receiptRequested = False;
pictureUpdated = None
pictureUpdated = messageNode.getChild("notification").getAttributeValue("type");
wr = None
wr = messageNode.getChild("request").getAttributeValue("xmlns");
if wr == "urn:xmpp:receipts":
receiptRequested = True
if pictureUpdated == "picture":
bodyNode = messageNode.getChild("notification").getChild("set") or messageNode.getChild("notification").getChild("delete")
if isGroup:
self.signalInterface.send("notification_groupPictureUpdated",(bodyNode.getAttributeValue("jid"), bodyNode.getAttributeValue("author"), timestamp, msgId, receiptRequested))
else:
self.signalInterface.send("notification_contactProfilePictureUpdated",(bodyNode.getAttributeValue("jid"), timestamp, msgId, receiptRequested))
else:
addSubject = None
removeSubject = None
author = None
bodyNode = messageNode.getChild("notification").getChild("add");
if bodyNode is not None:
addSubject = bodyNode.getAttributeValue("jid");
author = bodyNode.getAttributeValue("author") or addSubject
bodyNode = messageNode.getChild("notification").getChild("remove");
if bodyNode is not None:
removeSubject = bodyNode.getAttributeValue("jid");
author = bodyNode.getAttributeValue("author") or removeSubject
if addSubject is not None:
self.signalInterface.send("notification_groupParticipantAdded", (fromAttribute, addSubject, author, timestamp, msgId, receiptRequested))
if removeSubject is not None:
self.signalInterface.send("notification_groupParticipantRemoved", (fromAttribute, removeSubject, author, timestamp, msgId, receiptRequested))
elif typeAttribute == "subject":
receiptRequested = False;
requestNodes = messageNode.getAllChildren("request");
for requestNode in requestNodes:
if requestNode.getAttributeValue("xmlns") == "urn:xmpp:receipts":
receiptRequested = True;
bodyNode = messageNode.getChild("body");
newSubject = None if bodyNode is None else bodyNode.data;
if newSubject is not None:
self.signalInterface.send("group_subjectReceived",(msgId, fromAttribute, author, newSubject, int(attribute_t), receiptRequested))
elif typeAttribute == "chat":
wantsReceipt = False;
messageChildren = [] if messageNode.children is None else messageNode.children
for childNode in messageChildren:
if ProtocolTreeNode.tagEquals(childNode,"request"):
wantsReceipt = True;
if ProtocolTreeNode.tagEquals(childNode,"composing"):
self.signalInterface.send("contact_typing", (fromAttribute,))
elif ProtocolTreeNode.tagEquals(childNode,"paused"):
self.signalInterface.send("contact_paused",(fromAttribute,))
elif ProtocolTreeNode.tagEquals(childNode,"media") and msgId is not None:
self._d("MULTIMEDIA MESSAGE!");
mediaUrl = messageNode.getChild("media").getAttributeValue("url");
mediaType = messageNode.getChild("media").getAttributeValue("type")
mediaSize = messageNode.getChild("media").getAttributeValue("size")
encoding = messageNode.getChild("media").getAttributeValue("encoding")
mediaPreview = None
if mediaType == "image":
mediaPreview = messageNode.getChild("media").data
if encoding == "raw" and mediaPreview:
mediaPreview = base64.b64encode(mediaPreview)
if isGroup:
self.signalInterface.send("group_imageReceived", (msgId, fromAttribute, author, mediaPreview, mediaUrl, mediaSize, wantsReceipt))
else:
self.signalInterface.send("image_received", (msgId, fromAttribute, mediaPreview, mediaUrl, mediaSize, wantsReceipt))
elif mediaType == "video":
mediaPreview = messageNode.getChild("media").data
if encoding == "raw" and mediaPreview:
mediaPreview = base64.b64encode(mediaPreview)
if isGroup:
self.signalInterface.send("group_videoReceived", (msgId, fromAttribute, author, mediaPreview, mediaUrl, mediaSize, wantsReceipt))
else:
self.signalInterface.send("video_received", (msgId, fromAttribute, mediaPreview, mediaUrl, mediaSize, wantsReceipt))
elif mediaType == "audio":
mediaPreview = messageNode.getChild("media").data
if isGroup:
self.signalInterface.send("group_audioReceived", (msgId, fromAttribute, author, mediaUrl, mediaSize, wantsReceipt))
else:
self.signalInterface.send("audio_received", (msgId, fromAttribute, mediaUrl, mediaSize, wantsReceipt))
elif mediaType == "location":
mlatitude = messageNode.getChild("media").getAttributeValue("latitude")
mlongitude = messageNode.getChild("media").getAttributeValue("longitude")
name = messageNode.getChild("media").getAttributeValue("name")
mediaPreview = messageNode.getChild("media").data
if encoding == "raw" and mediaPreview:
mediaPreview = base64.b64encode(mediaPreview)
if isGroup:
self.signalInterface.send("group_locationReceived", (msgId, fromAttribute, author, name or "", mediaPreview, mlatitude, mlongitude, wantsReceipt))
else:
self.signalInterface.send("location_received", (msgId, fromAttribute, name or "", mediaPreview, mlatitude, mlongitude, wantsReceipt))
elif mediaType =="vcard":
#return
#mediaItem.preview = messageNode.getChild("media").data
vcardData = messageNode.getChild("media").getChild("vcard").toString()
vcardName = messageNode.getChild("media").getChild("vcard").getAttributeValue("name")
if vcardData is not None:
n = vcardData.find(">") +1
vcardData = vcardData[n:]
vcardData = vcardData.replace("</vcard>","")
if isGroup:
self.signalInterface.send("group_vcardReceived", (msgId, fromAttribute, author, vcardName, vcardData, wantsReceipt))
else:
self.signalInterface.send("vcard_received", (msgId, fromAttribute, vcardName, vcardData, wantsReceipt))
else:
self._d("Unknown media type")
return
elif ProtocolTreeNode.tagEquals(childNode,"body") and msgId is not None:
msgData = childNode.data;
#fmsg.setData({"status":0,"key":key.toString(),"content":msgdata,"type":WAXMPP.message_store.store.Message.TYPE_RECEIVED});
elif ProtocolTreeNode.tagEquals(childNode,"received") and fromAttribute is not None and msgId is not None:
if fromAttribute == "s.us":
self.signalInterface.send("profile_setStatusSuccess", ("s.us", msgId,))
return;
#@@TODO autosend ack from client
#print "NEW MESSAGE RECEIVED NOTIFICATION!!!"
#self.connection.sendDeliveredReceiptAck(fromAttribute,msg_id);
self.signalInterface.send("receipt_messageDelivered", (fromAttribute, msgId))
return
elif not (ProtocolTreeNode.tagEquals(childNode,"active")):
if ProtocolTreeNode.tagEquals(childNode,"request"):
wantsReceipt = True;
elif ProtocolTreeNode.tagEquals(childNode,"notify"):
notify_name = childNode.getAttributeValue("name");
elif ProtocolTreeNode.tagEquals(childNode,"delay"):
xmlns = childNode.getAttributeValue("xmlns");
if "urn:xmpp:delay" == xmlns:
stamp_str = childNode.getAttributeValue("stamp");
if stamp_str is not None:
stamp = stamp_str
timestamp = self.parseOfflineMessageStamp(stamp)*1000;
elif ProtocolTreeNode.tagEquals(childNode,"x"):
xmlns = childNode.getAttributeValue("xmlns");
if "jabber:x:event" == xmlns and msgId is not None:
self.signalInterface.send("receipt_messageSent", (fromAttribute, msgId))
elif "jabber:x:delay" == xmlns:
continue; #@@TODO FORCED CONTINUE, WHAT SHOULD I DO HERE? #wtf?
stamp_str = childNode.getAttributeValue("stamp");
if stamp_str is not None:
stamp = stamp_str
timestamp = stamp;
else:
if ProtocolTreeNode.tagEquals(childNode,"delay") or not ProtocolTreeNode.tagEquals(childNode,"received") or msgId is None:
continue;
receipt_type = childNode.getAttributeValue("type");
if receipt_type is None or receipt_type == "delivered":
self.signalInterface.send("receipt_messageDelivered", (fromAttribute, msgId))
elif receipt_type == "visible":
self.signalInterface.send("receipt_visible", (fromAttribute, msgId))
if msgData:
if isGroup:
self.signalInterface.send("group_messageReceived", (msgId, fromAttribute, author, msgData, timestamp, wantsReceipt, pushName))
else:
self.signalInterface.send("message_received", (msgId, fromAttribute, msgData, timestamp, wantsReceipt, pushName))
##@@TODO FROM CLIENT
'''if conversation.type == "group":
if conversation.subject is None:
signal = False
self._d("GETTING GROUP INFO")
self.connection.sendGetGroupInfo(fromAttribute)
'''
#if not len(conversation.getContacts()):
# self._d("GETTING GROUP CONTACTS")
# self.connection.sendGetParticipants(fromAttribute)
'''@@TODO FROM CLIENT
if ret is None:
conversation.incrementNew();
WAXMPP.message_store.pushMessage(fromAttribute,fmsg)
fmsg.key = key
else:
fmsg.key = eval(ret.key)
duplicate = True;
'''
| yah0o2010/whatsploit | Yowsup/connectionmanager.py | Python | mit | 44,255 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-17 18:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('annonce', '0009_auto_20160917_1303'),
]
operations = [
migrations.AlterField(
model_name='user',
name='cv',
field=models.FileField(upload_to='/media/cv/'),
),
]
| firasbenmakhlouf/JobLookup | annonce/migrations/0010_auto_20160917_1854.py | Python | mit | 454 |
#!/usr/bin/python
# =======================================================================
# This file is part of MCLRE.
#
# MCLRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MCLRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MCLRE. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2015 Augusto Queiroz de Macedo <[email protected]>
# =======================================================================
"""
MRBPR Runner
"""
from os import path
from argparse import ArgumentParser
import shlex
import subprocess
import multiprocessing
import logging
from run_rec_functions import read_experiment_atts
from mrbpr.mrbpr_runner import create_meta_file, run
##############################################################################
# GLOBAL VARIABLES
##############################################################################
# Define the Logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(name)s : %(message)s',
level=logging.INFO)
LOGGER = logging.getLogger('mrbpr.run_rec_mrbpr')
LOGGER.setLevel(logging.INFO)
##############################################################################
# AUXILIAR FUNCTIONS
##############################################################################
def get_mrbpr_confs():
""" Yield the MRBPR Models Configurations """
pass
##############################################################################
# MAIN
##############################################################################
if __name__ == '__main__':
# ------------------------------------------------------------------------
# Define the argument parser
PARSER = ArgumentParser(description="Script that runs the mrbpr event recommender algorithms for" \
" a given 'experiment_name' with data from a given 'region'")
PARSER.add_argument("-e", "--experiment_name", type=str, required=True,
help="The Experiment Name (e.g. recsys-15)")
PARSER.add_argument("-r", "--region", type=str, required=True,
help="The data Region (e.g. san_jose)")
PARSER.add_argument("-a", "--algorithm", type=str, required=True,
help="The algorithm name (used only to differenciate our proposed MRBPR to the others")
ARGS = PARSER.parse_args()
EXPERIMENT_NAME = ARGS.experiment_name
REGION = ARGS.region
ALGORITHM_NAME = ARGS.algorithm
LOGGER.info(ALGORITHM_NAME)
DATA_DIR = "data"
PARTITIONED_DATA_DIR = path.join(DATA_DIR, "partitioned_data")
PARTITIONED_REGION_DATA_DIR = path.join(PARTITIONED_DATA_DIR, REGION)
EXPERIMENT_DIR = path.join(DATA_DIR, "experiments", EXPERIMENT_NAME)
EXPERIMENT_REGION_DATA_DIR = path.join(EXPERIMENT_DIR, REGION)
# LOGGER.info('Defining the MRBPR relation weights file...')
subprocess.call(shlex.split("Rscript %s %s %s" %
(path.join("src", "recommender_execution", "mrbpr", "mrbpr_relation_weights.R"),
EXPERIMENT_NAME, ALGORITHM_NAME)))
# ------------------------------------------------------------------------
# Reading and Defining the Experiment Attributes
EXPERIMENT_ATTS = read_experiment_atts(EXPERIMENT_DIR)
PARALLEL_RUNS = multiprocessing.cpu_count() - 1
TRAIN_RELATION_NAMES = EXPERIMENT_ATTS['%s_relation_names' % ALGORITHM_NAME.lower()]
TRAIN_RELATION_FILES = ["%s_train.tsv" % name for name in TRAIN_RELATION_NAMES]
PARTITIONS = reversed(EXPERIMENT_ATTS['partitions'])
# ------------------------------------------------------------------------
    # Defining and creating the META relations file
META_FILE = path.join(EXPERIMENT_DIR, "%s_meetup.meta" % ALGORITHM_NAME.lower())
LOGGER.info('Creating the META relations file...')
create_meta_file(TRAIN_RELATION_NAMES, META_FILE, PARTITIONED_DATA_DIR)
# ------------------------------------------------------------------------
# Fixed parameters
# ------------------------------------------------------------------------
# Algorithm (0 - MRBPR)
ALGORITHM = 0
# Size of the Ranked list of events per User
RANK_SIZE = 100
# Save Parameters
SAVE_MODEL = 0
# Hyper Parameters
REGULARIZATION_PER_ENTITY = ""
REGULARIZATION_PER_RELATION = ""
RELATION_WEIGHTS_FILE = path.join(EXPERIMENT_DIR, "%s_relation_weights.txt" % ALGORITHM_NAME.lower())
# ------------------------------------------------------------------------
if ALGORITHM_NAME == "MRBPR":
LEARN_RATES = [0.1]
NUM_FACTORS = [300]
NUM_ITERATIONS = [1500]
elif ALGORITHM_NAME == "BPR-NET":
LEARN_RATES = [0.1]
NUM_FACTORS = [200]
NUM_ITERATIONS = [600]
else:
LEARN_RATES = [0.1]
NUM_FACTORS = [10]
NUM_ITERATIONS = [10]
MRBPR_BIN_PATH = path.join("src", "recommender_execution", "mrbpr", "mrbpr.bin")
LOGGER.info("Start running MRBPR Process Scheduler!")
run(PARTITIONED_REGION_DATA_DIR, EXPERIMENT_REGION_DATA_DIR,
REGION, ALGORITHM, RANK_SIZE, SAVE_MODEL, META_FILE,
REGULARIZATION_PER_ENTITY, REGULARIZATION_PER_RELATION,
RELATION_WEIGHTS_FILE, TRAIN_RELATION_FILES,
PARTITIONS, NUM_ITERATIONS, NUM_FACTORS, LEARN_RATES,
MRBPR_BIN_PATH, PARALLEL_RUNS, ALGORITHM_NAME)
LOGGER.info("DONE!")
| augustoqm/MCLRE | src/recommender_execution/run_rec_mrbpr.py | Python | gpl-3.0 | 5,871 |
""" Pipeless / examples.py. MIT licensed.
Basic functionality
>>> from pipeless import pipeline
>>> function, run, _ = pipeline(lambda item, e: None)
>>> @function
... def up_one(_): return _+1
>>> list(run([0, 1, 3]))
[1, 2, 4]
>>> @function
... def twofer(_):
... yield _
... yield _
>>> list(run([0, 1, 3]))
[1, 1, 2, 2, 4, 4]
Pipelines are composable
>>> list(run(run([0])))
[2, 2, 2, 2]
Returning None Drops result
>>> @function
... def none(_): return None
>>> list(run([0]))
[]
Exception handler can replace result
>>> function, run, _ = pipeline(lambda item, e: 100)
>>> @function
... def raises_exception():
... def func(_):
... raise Exception
... return func
>>> list(run([0]))
[100]
Grouping up functions
>>> function, run, _ = pipeline(lambda item, e: None)
>>> @function('my_group')
... def nothing_special(_): return _
>>> list(run([1,2,3]))
[1, 2, 3]
>>> @function('baller_group')
... def triple_double(_):
... return 3*(_**2)
>>> list(run([1,2,3]))
[3, 12, 27]
>>> @function('my_group')
... def zeroed(_): return 0
>>> list(run([1,2,3]))
[0, 0, 0]
>>> list(run([1,2,3], function_groups_to_skip=['my_group']))
[3, 12, 27]
>>> list(run([1,2,3], function_groups_to_skip=['my_group', 'baller_group']))
[1, 2, 3]
Function Builders
>>> function, run, _ = pipeline(lambda item, e: None, use_builders=True)
>>> @function
... def bob_the_builder(): return lambda _: _+1
>>> list(run([1,2,3]))
[2, 3, 4]
"""
| andychase/pipeless | examples.py | Python | mit | 1,451 |
# -*- coding: utf-8 -*-
import json
import datetime
from datetime import date
import turbotlib
from bs4 import BeautifulSoup
import bs4
import requests
#FUNCTIONS
#retrieve a document at a given URL as parsed html tree
def get_doc(source_url):
response = requests.get(source_url)
html = response.content
doc = BeautifulSoup(html)
return doc
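# Turn an HTML table into a list of dicts: the first row supplies the keys
# (capitalized header text) and each following row becomes one item; when
# further_detail is set, cell links are also captured as absolute detail URLs.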
def parse_table(table, further_detail=False):
items = [] #output
try:
rows = table.find_all("tr") #input
#extract headers
headers = []
header_row = rows[0]
for th in header_row.find_all("th"):
header = th.text.strip().capitalize()
headers.append(header)
#go through the rows - turn each into an object
for row in rows[1:]:
td_list = row.find_all("td")
td_index = 0 #record how far through columns we are, so we can find right header
item = {} #make item to store findings
for td in td_list:
value = td.text.strip()
                # the duplicated literal-space check most likely stood in for a non-breaking space from the HTML
                if ((len(value) > 0) and (value != " ") and (value != u"\xa0")):
item[headers[td_index]] = value
#if we have a link then get the url for further details
if (further_detail):
if (td.a != None):
url = td.a['href']
if (url[:7] != "mailto:"):
item['detail_url'] = base_href + url
#ready for next column
td_index += 1
if (len(item) > 0):
items.append(item)
except:
pass
return items
#urls to use
base_href = "http://www.asfromania.ro/registru/"
front_url = base_href + "lista.php?listasect=1&lng=2"
#get going
sample_date = str(date.today())
turbotlib.log("Starting run on " + sample_date) # Optional debug logging
#Step 1: extract list of categories from front page
try:
categories = [] #store the list as we find them
front_page = get_doc(front_url)
category_list = front_page.find("table", id="listaEntitati")
category_rows = category_list.find_all("tr")
current_category = None #maintain link to current category
for row in category_rows:
td_list = row.find_all("td")
#deal only with non-empty rows
if (len(td_list) > 0):
#identify categories with sub-categories. to avoid double counting, we'll only add the subcategories to the list
if (td_list[0].img != None):
category = {
'number': td_list[1].text.strip(),
'symbol': td_list[2].text.strip(),
'name': td_list[3].text.strip(),
'definition': base_href + td_list[4].a['href'],
'url': base_href + td_list[3].a['href'],
}
current_category = category #move link to match this one
#else we either have a main category with no subcategories, or this is a subcategory
else:
#if this has a red circle in column 2, it's a subcategory
if (td_list[1].img != None):
subcategory = {
'number': current_category['number'],
'symbol': td_list[2].text.strip(),
'name': td_list[3].text.strip(),
'definition': current_category['definition'],
'url': base_href + td_list[3].a['href']
}
categories.append(subcategory)
#otherwise, it's an undivided main category
else:
undivided_category = {
'number': td_list[1].text.strip(),
'symbol': td_list[2].text.strip(),
'name': td_list[3].text.strip(),
'definition': base_href + td_list[4].a['href'],
'url': base_href + td_list[3].a['href']
}
categories.append(undivided_category)
#monitor progress
turbotlib.log(str(len(categories)) + " categories identified")
#Step 2: work out what we'll actually want to parse
category_count = 1
for category in categories:
#go get the page and table of details
try:
category_page = get_doc(category['url'])
category_table = category_page.find("table", id="listaEntitati")
category_items = parse_table(category_table, True)
#monitor progress
turbotlib.log(str(len(category_items)) + " items identified in category " + category['symbol'] + " (" + str(category_count) + " / " + str(len(categories)) + ")")
category_count += 1
#Step 3: go and get the details on each one of those entities (doing this while we look at the category)
item_count = 1
for item in category_items:
turbotlib.log(" Parsing item " + str(item_count) + " / " + str(len(category_items)) + " in category " + category['symbol'])
try:
detail_page = get_doc(item['detail_url'])
#extract the name from the top of the page
company_name = detail_page.table.text.strip().title()
item['company_name'] = company_name
#all the details we can extract are in tables in frames, with regular layout
iframes = detail_page.find_all("iframe")
for iframe in iframes:
try:
label = iframe.parent.parent.parent.parent.attrs['id']
iframe_src = base_href + iframe['src']
iframe_doc = get_doc(iframe_src)
iframe_table = iframe_doc.table
if (iframe_table != None):
details = parse_table(iframe_table)
#only one row means add things directly
if (len(details) == 1):
if (len(details[0]) == 1): #literally only one field, so add it straight to object
item[details[0].keys()[0]] = details[0].values()[0]
else:
item[label] = details[0]
elif (len(details) > 1): #more than one row, so add a list
item[label] = details #add to output
except:
pass
#output our results - after adding metadata
if (len(item) > 0):
item['sample_date'] = sample_date
item['source_url'] = category['url']
item['category'] = {
'name': category['name'],
'number': category['number'],
'symbol': category['symbol'],
'definition_url': category['definition']
}
item['source'] = "Financial Supervisory Authority, Romania"
print(json.dumps(item))
except:
pass
item_count += 1
except:
pass
except:
pass
#just confirm we're done
turbotlib.log("Finished run.") | dinotash/opencorporates | ro-sec-licences/scraper.py | Python | mit | 5,840 |
# -*- coding: utf-8 -*-
#
# Ceres Solver documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 20 20:34:07 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Ceres Solver'
copyright = u'2014 Google Inc'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.11'
# The full version, including alpha/beta/rc tags.
release = '1.11.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes",]
import sphinx_rtd_theme
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Ceres Solver"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CeresSolverdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CeresSolver.tex', u'Ceres Solver',
u'Sameer Agarwal \\& Keir Mierle', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ceressolver', u'Ceres Solver',
[u'Sameer Agarwal & Keir Mierle'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CeresSolver', u'Ceres Solver',
u'Sameer Agarwal & Keir Mierle', 'CeresSolver', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| af-silva/cartographer_ros | thirdparty/ceres_solver/docs/source/conf.py | Python | apache-2.0 | 7,935 |
"""
Test thread stepping features in combination with frame select.
"""
import lldb
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.lldbtest import *
class ThreadSteppingTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to of function 'c'.
self.line1 = line_number(
'main.c', '// Find the line number of function "c" here.')
self.line2 = line_number(
'main.c', '// frame select 2, thread step-out while stopped at "c(1)"')
self.line3 = line_number(
'main.c', '// thread step-out while stopped at "c(2)"')
self.line4 = line_number(
'main.c', '// frame select 1, thread step-out while stopped at "c(3)"')
def test_step_out_with_run_command(self):
"""Exercise thread step-out and frame select followed by thread step-out."""
self.build()
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Create a breakpoint inside function 'c'.
lldbutil.run_break_set_by_file_and_line(
self, "main.c", self.line1, num_expected_locations=1, loc_exact=True)
# Now run the program.
self.runCmd("run", RUN_SUCCEEDED)
# The process should be stopped at this point.
self.expect("process status", PROCESS_STOPPED,
patterns=['Process .* stopped'])
# The frame #0 should correspond to main.c:32, the executable statement
# in function name 'c'. And frame #3 should point to main.c:37.
self.expect("thread backtrace", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint"],
patterns=["frame #0.*main.c:%d" % self.line1,
"frame #3.*main.c:%d" % self.line2])
# We want to move the pc to frame #3. This can be accomplished by
# 'frame select 2', followed by 'thread step-out'.
self.runCmd("frame select 2")
self.runCmd("thread step-out")
self.expect("thread backtrace", STEP_OUT_SUCCEEDED,
substrs=["stop reason = step out"],
patterns=["frame #0.*main.c:%d" % self.line2])
# Let's move on to a single step-out case.
self.runCmd("process continue")
# The process should be stopped at this point.
self.expect("process status", PROCESS_STOPPED,
patterns=['Process .* stopped'])
self.runCmd("thread step-out")
self.expect("thread backtrace", STEP_OUT_SUCCEEDED,
substrs=["stop reason = step out"],
patterns=["frame #0.*main.c:%d" % self.line3])
        # Do another frame select, followed by thread step-out.
self.runCmd("process continue")
# The process should be stopped at this point.
self.expect("process status", PROCESS_STOPPED,
patterns=['Process .* stopped'])
self.runCmd("frame select 1")
self.runCmd("thread step-out")
self.expect("thread backtrace", STEP_OUT_SUCCEEDED,
substrs=["stop reason = step out"],
patterns=["frame #0.*main.c:%d" % self.line4])
| endlessm/chromium-browser | third_party/llvm/lldb/test/API/lang/c/stepping/TestThreadStepping.py | Python | bsd-3-clause | 3,367 |
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from fest.views import score, rateMe, home, result_action, ItemListView, ItemDetailScoreView, ItemDetailView, confirm_rating, save_score, SpecialAwardListView
from django.contrib import admin
from django.contrib.auth.views import logout, password_change, password_change_done
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', home, name='home'),
url(r'^score/$', score, name='items'),
url(r'^awards/$', SpecialAwardListView.as_view(), name='awards'),
url(r'^report/$', ItemListView.as_view(), name='item_report'),
url(r'^report/(?P<pk>\d+)$', ItemDetailView.as_view(), name='item_rating_report'),
url(r'^score/(?P<pk>\d+)$', ItemDetailScoreView.as_view(), name='item_jury_scoring'),
url(r'^score/save$', save_score, name='save_score'),
url(r'^resultaction/$', result_action, name='resultaction'),
url(r'^admin/', include(admin.site.urls)),
url(r'^rate/$', rateMe, name='rating'),
url(r'^rating/confirm$', confirm_rating, name='confirm_rating'),
url(r'^logout/$', logout, {'next_page': '/'}, name='logout'),
url(r'^password/$', password_change,{'template_name':'password_change_form.html'}, name='change_password'),
#url(r'^password/$', password_change, name='change_password'),
url(r'^password/done$', password_change_done,{'template_name':'password_change_done.html'}, name='password_change_done'),
)
if settings.DEBUG and settings.MEDIA_ROOT:
urlpatterns += patterns('',
(r'^files/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT}))
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| rashivkp/animation-fest | fest/urls.py | Python | agpl-3.0 | 1,781 |
#!/usr/bin/env python
# Copyright (C) 2009-2010:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class TestConfig(ShinkenTest):
def setUp(self):
self.setup_with_file('etc/nagios_broken_1.cfg')
def test_conf_is_correct(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
self.assert_(not self.conf.conf_is_correct)
#self.show_logs()
if __name__ == '__main__':
unittest.main()
| wbsavage/shinken | test/test_config.py | Python | agpl-3.0 | 1,302 |
# Copyright (C) 2010 by Kevin Saff
# This file is part of the CA scanner.
# The CA scanner is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# The CA scanner is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with the CA scanner. If not, see <http://www.gnu.org/licenses/>.
import numpy
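# Boundary-condition helpers: each class stitches a margin of ghost cells
# around a 2D lattice and maps world coordinates into array coordinates.
# torus wraps both axes, so the margins are copies of the opposite edges.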
class torus(object):
@staticmethod
def stitch(array, margin=1):
if isinstance(array, list):
for chart in array:
torus.stitch(chart, margin)
return
if margin == 1: #optimization
return torus.stitch_1(array, margin)
array[-margin:,...] = array[margin:margin * 2,...]
array[:margin,...] = array[-margin * 2:-margin,...]
array[:,-margin:,...] = array[:,margin:margin * 2,...]
array[:,:margin,...] = array[:,-margin * 2:-margin,...]
@staticmethod
def stitch_1(array, margin = 1):
array[-1,:,...] = array[1,:,...]
array[0,:,...] = array[-2,:,...]
array[:,-1,...] = array[:,1,...]
array[:,0,...] = array[:,-2,...]
@staticmethod
def map_point(point, array, margin = 1):
return (point[0] % (array.shape[0] - margin*2) + margin,
point[1] % (array.shape[1] - margin*2) + margin)
@staticmethod
def map_slice(upper_left, array, margin = 1):
x0, y0 = torus.map_point(upper_left, array, margin)
x1, y1 = (array.shape[0] - margin,
array.shape[1] - margin)
return array[x0:x1, y0:y1]
def torusfall(fall):
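    """Return a torus variant whose stitched margin values are shifted by
    'fall' across the column seam (added on one side, subtracted on the
    other)."""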
class torusfall(torus):
@staticmethod
def stitch(array, margin=1):
if isinstance(array, list):
for chart in array:
torusfall.stitch(chart, margin)
return
torus.stitch(array, margin)
array[:,:margin] += fall
array[:,-margin:] -= fall
return torusfall
class rectangle(object):
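    """Bounded (non-wrapping) chart: nothing is stitched, and slices
    requested outside the chart come back zero-filled."""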
@staticmethod
def stitch(array, margin = 1):
return
@staticmethod
def map_point(point, array, margin = 1):
return (point[0] + margin, point[1] + margin)
@staticmethod
def map_slice(upper_left, array, margin = 1):
x, y = rectangle.map_point(upper_left, array, margin)
if x < margin and y < margin:
return numpy.zeros(shape=(margin-x, margin-y), dtype=numpy.uint8)
elif x < margin:
return numpy.zeros(shape=(margin-x,
array.shape[1]-2*margin),
dtype=numpy.uint8)
elif y < margin:
return numpy.zeros(shape=(array.shape[0]-2*margin,
margin-y),
dtype=numpy.uint8)
elif x >= array.shape[0] - margin or y >= array.shape[1] - margin:
return numpy.zeros(shape=array.shape, dtype=array.dtype)
else:
if margin:
return array[x:-margin, y:-margin]
else:
return array[x:, y:]
class projective_plane(object):
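    """Chart glued like a real projective plane: each margin is copied from
    the opposite edge with its row or column order reversed."""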
@staticmethod
def stitch(array, margin = 1):
if margin == 1: #optimization
return projective_plane.stitch_1(array, margin)
array[-margin:,:] = array[margin:margin * 2,::-1]
array[:margin,:] = array[-margin * 2:-margin,::-1]
array[:,-margin:] = array[::-1,margin:margin * 2]
array[:,:margin] = array[::-1,-margin * 2:-margin]
@staticmethod
def stitch_1(array, margin = 1):
array[-1,:] = array[1,::-1]
array[0,:] = array[-2,::-1]
array[:,-1] = array[::-1,1]
array[:,0] = array[::-1,-2]
@staticmethod
def map_point(point, array, margin = 1):
d = [(point[i] // (array.shape[i] - margin*2)) % 2 for i in (0, 1)]
r = [(point[i] % (array.shape[i] - margin*2)) + margin for i in (0, 1)]
for i in (0, 1):
if d[1 - i]:
r[i] = array.shape[i] - 1 - r[i]
return r
@staticmethod
def map_slice(upper_left, array, margin = 1):
d = [(upper_left[i] // (array.shape[i] - margin*2)) % 2 for i in (0, 1)]
x0, y0 = projective_plane.map_point(upper_left, array, margin)
if not d[1]:
x1 = array.shape[0] - margin
else:
x1 = margin - 1
if not d[0]:
y1 = array.shape[1] - margin
else:
y1 = margin - 1
return array[x0:x1:(1 - 2*d[1]), y0:y1:(1 - 2*d[0])]
class patchwork(object):
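    """Several charts glued together along explicit seams: connections is a
    list of tuples (first, second, direction[, fun[, invfun]]) where
    first <= second are chart indices, direction is a string such as '+x',
    '-x', '+y' or '-y', and fun (with optional inverse invfun) is a callable,
    a numeric offset or an (offset, modulus) pair applied to values copied
    across a seam."""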
def __init__(self, connections):
self.connections = connections
# Setup some stuff for mapping points.
self.next = [{}, {}]
self.prev = [{}, {}]
for connection in connections:
first, second = connection[:2]
assert first <= second
direction = connection[2]
sign = -1 if direction.startswith('-') else +1
dim = 'xy'.index(direction[-1])
self.next[dim][first] = (second, sign)
            self.prev[dim][second] = (first, sign)
    def stitch(self, array, margin=1):
for connection in self.connections:
first, second = connection[:2]
direction = connection[2]
fun = connection[3] if len(connection) > 3 else None
invfun = connection[4] if len(connection) > 4 else None
            if not fun:
                fun = invfun = lambda x: x
            elif isinstance(fun, (int, float, long)):
                # Bind the numeric offset as a default argument so the lambdas
                # keep the original value rather than the rebound name 'fun'.
                fun, invfun = (lambda x, d=fun: x + d), (lambda x, d=fun: x - d)
            elif isinstance(fun, tuple):
                fun, invfun = ((lambda x, t=fun: (x + t[0]) % t[1]),
                               (lambda x, t=fun: (x - t[0]) % t[1]))
if direction.startswith('-'):
D = -1
else:
D = +1
if direction.endswith('x'):
array[second][-margin:,:] = fun(array[first][margin:margin * 2,::D])
array[first][:margin,:] = fun(array[second][-margin * 2:-margin,::D])
elif direction.endswith('y'):
array[second][:,-margin:] = fun(array[first][::D,margin:margin * 2])
array[first][:,:margin] = fun(array[second][::D,-margin * 2:-margin])
    def map_point(self, point, array, margin = 1):
off = [0, 0]
sign = [1, 1]
index = 0
for i in (0, 1):
            while off[i] + array[index].shape[i] - margin*2 < point[i]:
off[i] += array[index].shape[i] - margin*2
if index not in self.next[i]:
return None
index, ns = self.next[i][index]
sign[i] *= ns
            while off[i] > point[i]:
if index not in self.prev[i]:
return None
index, ns = self.prev[i][index]
sign[i] *= ns
off[i] -= array[index].shape[i] - margin*2
return (index,
point[0] - off[0] + margin,
point[1] - off[1] + margin)
    def map_slice(self, upper_left, array, margin = 1):
index, x0, y0 = self.map_point(upper_left, array, margin)
x1, y1 = (array[index].shape[0] - margin,
array[index].shape[1] - margin)
return array[index][x0:x1, y0:y1]
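if __name__ == '__main__':
    # Minimal demo (an illustration added here, assuming only numpy):
    # a 4x4 chart with a 1-cell margin is stitched as a torus, after which
    # each border row/column mirrors the interior row/column on the far
    # side, so stepping off one edge re-enters from the opposite edge.
    chart = numpy.arange(16, dtype=numpy.uint8).reshape(4, 4)
    torus.stitch(chart)
    assert (chart[0, :] == chart[-2, :]).all()
    assert (chart[:, 0] == chart[:, -2]).all()
    print chart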
| kcsaff/CA | src/charts/_scantop.py | Python | gpl-3.0 | 7,923 |
# -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <[email protected]>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from openerp.osv import fields, orm
from openerp.tools.translate import _
class hr_attendance(orm.Model):
_name = 'hr.attendance'
_inherit = 'hr.attendance'
_columns = {
'state': fields.selection((
('draft', 'Unverified'), (
'verified', 'Verified'), ('locked', 'Locked'),
), 'State', required=True, readonly=True),
}
_defaults = {
'state': 'draft',
}
def is_locked(self, cr, uid, employee_id, utcdt_str, context=None):
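        # True when utcdt_str falls inside a payroll period that is locked,
        # generated, paid or closed and whose schedule covers one of the
        # employee's contracts.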
res = False
pp_obj = self.pool.get('hr.payroll.period')
ee_data = self.pool.get('hr.employee').read(
cr, uid, employee_id,
['contract_ids'], context=context)
pp_ids = pp_obj.search(cr, uid, [
('state', 'in', [
'locked', 'generate', 'payment', 'closed']),
'&', ('date_start', '<=', utcdt_str),
('date_end', '>=', utcdt_str),
], context=context)
for pp in pp_obj.browse(cr, uid, pp_ids, context=context):
pp_contract_ids = [c.id for c in pp.schedule_id.contract_ids]
for c_id in ee_data['contract_ids']:
if c_id in pp_contract_ids:
res = True
break
if res is True:
break
return res
def create(self, cr, uid, vals, context=None):
if self.is_locked(
cr, uid, vals['employee_id'], vals['name'], context=context
):
ee_data = self.pool.get(
'hr.employee').read(cr, uid, vals['employee_id'], ['name'],
context=context)
raise orm.except_orm(
_('The period is Locked!'),
_("You may not add an attendance record to a locked period.\n"
"Employee: %s\n"
"Time: %s") % (ee_data['name'], vals['name']))
return super(hr_attendance, self).create(
cr, uid, vals, context=context)
def unlink(self, cr, uid, ids, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
for punch in self.browse(cr, uid, ids, context=context):
if punch.state in ['verified', 'locked']:
raise orm.except_orm(
_('The Record cannot be deleted!'),
_("You may not delete a record that is in a %s state:\n"
"Employee: %s, Date: %s, Action: %s")
% (
punch.state, punch.employee_id.name, punch.name,
punch.action))
return super(hr_attendance, self).unlink(cr, uid, ids, context=context)
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
for punch in self.browse(cr, uid, ids, context=context):
if (
punch.state in ['verified', 'locked']
and (
vals.get('name') or vals.get('action')
or vals.get('employee_id'))
):
raise orm.except_orm(
_('The record cannot be modified!'),
_("You may not write to a record that is in a %s state:\n"
"Employee: %s, Date: %s, Action: %s")
% (
punch.state, punch.employee_id.name, punch.name,
punch.action))
return super(hr_attendance, self).write(
cr, uid, ids, vals, context=context)
| Endika/hr | hr_payroll_period/hr_attendance.py | Python | agpl-3.0 | 4,381 |
# coding: utf-8
r"""Offset operations"""
import logging
from OCC.BRepOffset import BRepOffset_Skin
from OCC.BRepOffsetAPI import BRepOffsetAPI_MakeOffset, \
BRepOffsetAPI_MakeOffsetShape
from OCC.GeomAbs import GeomAbs_Arc, GeomAbs_Tangent, GeomAbs_Intersection
from aocutils.exceptions import OffsetShapeException
from aocutils.topology import shape_to_topology
from aocutils.tolerance import OCCUTILS_DEFAULT_TOLERANCE
logger = logging.getLogger(__name__)
def offset_shape(shape_to_offset,
offset_distance,
tolerance=OCCUTILS_DEFAULT_TOLERANCE,
offset_mode=BRepOffset_Skin,
intersection=False,
selfintersection=False,
join_type=GeomAbs_Arc):
r"""Builds an offset shell from a shape construct
an offset version of the shape
Parameters
----------
    shape_to_offset : OCC.TopoDS.TopoDS_Shape
        the shape from which the offset shell is built
    offset_distance : float
        the distance by which the shape is offset
    tolerance : float
        tolerance used by the offset algorithm
offset_mode : BRepOffset_*, optional
(the default is BRepOffset_Skin)
intersection : bool
selfintersection : bool
join_type : GeomAbs_*
(the default is GeomAbs_Arc)
Returns
-------
OCC.TopoDS.TopoDS_Shape
"""
try:
an_offset = BRepOffsetAPI_MakeOffsetShape(shape_to_offset,
offset_distance,
tolerance,
offset_mode,
intersection,
selfintersection,
join_type)
if an_offset.IsDone():
return an_offset.Shape()
else:
msg = "Offset shape not done"
logger.error(msg)
raise OffsetShapeException(msg)
except RuntimeError:
msg = "Failed to offset shape"
logger.error(msg)
raise OffsetShapeException(msg)
def offset(wire_or_face,
offset_distance,
altitude=0,
join_type=GeomAbs_Arc):
r"""Builds a offset wire or face from a wire or face
construct an offset version of the shape
Parameters
----------
wire_or_face
the wire or face to offset
offset_distance : float
the distance to offset
altitude : float
move the offset shape to altitude from the normal of the wire or face
join_type
        the join type of the offset; can be one of
GeomAbs_Arc, GeomAbs_Tangent,
GeomAbs_Intersection
Returns
-------
OCC.TopoDS.TopoDS_Shape
Notes
-----
    A shape that has a negative offset_distance will return a sharp corner
"""
_joints = [GeomAbs_Arc,
GeomAbs_Tangent,
GeomAbs_Intersection]
# assert join_type in _joints, '%s is not one of %s' % (join_type, _joints)
if join_type not in _joints:
msg = '%s is not one of %s' % (join_type, _joints)
logger.error(msg)
raise ValueError(msg)
try:
an_offset = BRepOffsetAPI_MakeOffset(wire_or_face, join_type)
an_offset.Perform(offset_distance, altitude)
if an_offset.IsDone():
return shape_to_topology(an_offset.Shape())
else:
msg = "offset not done"
logger.error(msg)
raise OffsetShapeException(msg)
except RuntimeError:
msg = "failed to offset"
logger.error(msg)
raise OffsetShapeException(msg)
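if __name__ == '__main__':
    # Minimal usage sketch (an illustration added here, not part of the
    # library); it assumes the same old-style pythonocc bindings that the
    # imports above rely on.
    from OCC.gp import gp_Pnt
    from OCC.BRepPrimAPI import BRepPrimAPI_MakeBox
    from OCC.BRepBuilderAPI import BRepBuilderAPI_MakePolygon
    # offset_shape: offset a 10 x 10 x 10 box solid by 1.0.
    box = BRepPrimAPI_MakeBox(10., 10., 10.).Shape()
    bigger_box = offset_shape(box, 1.0)
    # offset: offset a closed square wire by 2.0 with rounded (arc) corners.
    polygon = BRepBuilderAPI_MakePolygon()
    for x, y in [(0., 0.), (10., 0.), (10., 10.), (0., 10.)]:
        polygon.Add(gp_Pnt(x, y, 0.))
    polygon.Close()
    offset_wire = offset(polygon.Wire(), 2.0)
    print(bigger_box, offset_wire)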
| guillaume-florent/aoc-utils | aocutils/operations/offset.py | Python | lgpl-3.0 | 3,535 |