code | repo_name | path | language | license | size
---|---|---|---|---|---
import json
from django.contrib.auth.models import User
from django.test import TestCase
try:
from django.core.urlresolvers import reverse
except ImportError: # Django 2.0
from django.urls import reverse
class DjangoQLAdminTest(TestCase):
def setUp(self):
self.credentials = {'username': 'test', 'password': 'lol'}
User.objects.create_superuser(email='[email protected]', **self.credentials)
def get_json(self, url, status=200, **kwargs):
response = self.client.get(url, **kwargs)
self.assertEqual(status, response.status_code)
try:
return json.loads(response.content.decode('utf8'))
except ValueError:
self.fail('Not a valid json')
def test_introspections(self):
url = reverse('admin:core_book_djangoql_introspect')
# unauthorized request should be redirected
response = self.client.get(url)
self.assertEqual(302, response.status_code)
self.assertTrue(self.client.login(**self.credentials))
# authorized request should be served
introspections = self.get_json(url)
self.assertEqual('core.book', introspections['current_model'])
for model in ('core.book', 'auth.user', 'auth.group'):
self.assertIn(model, introspections['models'])
def test_djangoql_syntax_help(self):
url = reverse('admin:djangoql_syntax_help')
# unauthorized request should be redirected
response = self.client.get(url)
self.assertEqual(302, response.status_code)
self.assertTrue(self.client.login(**self.credentials))
# authorized request should be served
response = self.client.get(url)
self.assertEqual(200, response.status_code)
def test_suggestions(self):
url = reverse('admin:core_book_djangoql_suggestions')
# unauthorized request should be redirected
response = self.client.get(url)
self.assertEqual(302, response.status_code)
# authorize for the next checks
self.assertTrue(self.client.login(**self.credentials))
# field parameter is mandatory
r = self.get_json(url, status=400)
self.assertEqual(r.get('error'), '"field" parameter is required')
# check for unknown fields
r = self.get_json(url, status=400, data={'field': 'gav'})
self.assertEqual(r.get('error'), 'Unknown field: gav')
r = self.get_json(url, status=400, data={'field': 'x.y'})
self.assertEqual(r.get('error'), 'Unknown model: core.x')
r = self.get_json(url, status=400, data={'field': 'auth.user.lol'})
self.assertEqual(r.get('error'), 'Unknown field: lol')
# field with choices
r = self.get_json(url, data={'field': 'genre'})
self.assertEqual(r, {
'page': 1,
'has_next': False,
'items': ['Drama', 'Comics', 'Other'],
})
# test that search is working
r = self.get_json(url, data={'field': 'genre', 'search': 'o'})
self.assertEqual(r, {
'page': 1,
'has_next': False,
'items': ['Comics', 'Other'],
})
# ensure that page parameter is checked correctly
r = self.get_json(url, status=400, data={'field': 'genre', 'page': 'x'})
self.assertEqual(
r.get('error'),
"invalid literal for int() with base 10: 'x'",
)
r = self.get_json(url, status=400, data={'field': 'genre', 'page': '0'})
self.assertEqual(
r.get('error'),
'page must be an integer starting from 1',
)
# check that paging after results end works correctly
r = self.get_json(url, data={'field': 'genre', 'page': 2})
self.assertEqual(r, {
'page': 2,
'has_next': False,
'items': [],
})
def test_query(self):
url = reverse('admin:core_book_changelist') + '?q=price=0'
self.assertTrue(self.client.login(**self.credentials))
response = self.client.get(url)
# There should be no error at least
self.assertEqual(200, response.status_code)
| ivelum/djangoql | test_project/core/tests/test_admin.py | Python | mit | 4,169 |
from django.apps import AppConfig
class ChunksConfig(AppConfig):
name = 'chunks'
verbose_name = 'Chunks'
| vaal-/il2_stats | src/chunks/apps.py | Python | mit | 121 |
import datetime
import decimal
import re
import random
import logging
from cStringIO import StringIO
from string import letters
from hashlib import md5
from unittest import skipIf
# LIBRARIES
import django
from django.conf import settings
from django.core.files.uploadhandler import StopFutureHandlers
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db import connection as default_connection, DataError, models
from django.db.models.query import Q
from django.forms import ModelForm
from django.test import RequestFactory
from django.utils.safestring import SafeText
from django.forms.models import modelformset_factory
from google.appengine.api.datastore_errors import EntityNotFoundError, TransactionFailedError
from google.appengine.datastore import datastore_rpc
from google.appengine.api import datastore
from google.appengine.ext import deferred
from google.appengine.api import taskqueue
from django.test.utils import override_settings
from django.core.exceptions import FieldError
from django.template import Template, Context
# DJANGAE
from djangae.contrib import sleuth
from djangae.fields import CharField
from djangae.test import inconsistent_db, TestCase
from django.db import IntegrityError, NotSupportedError
from djangae.db.backends.appengine.commands import FlushCommand
from djangae.db import constraints
from djangae.db.constraints import UniqueMarker, UniquenessMixin
from djangae.db.unique_utils import _unique_combinations, unique_identifiers_from_entity
from djangae.db.backends.appengine.indexing import add_special_index, IExactIndexer, get_indexer
from djangae.db.backends.appengine import indexing
from djangae.db.utils import entity_matches_query, decimal_to_string, normalise_field_value
from djangae.db.caching import disable_cache
from djangae.fields import SetField, ListField, RelatedSetField
from djangae.storage import BlobstoreFileUploadHandler
from djangae.core import paginator
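# The datastore namespace configured on the default connection; used below when
# building raw datastore keys and queries so they target the same namespace as the ORM.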
DEFAULT_NAMESPACE = default_connection.ops.connection.settings_dict.get("NAMESPACE")
try:
import webtest
except ImportError:
webtest = NotImplemented
class TestUser(models.Model):
username = models.CharField(max_length=32)
email = models.EmailField()
last_login = models.DateField(auto_now_add=True)
field2 = models.CharField(max_length=32)
def __unicode__(self):
return self.username
class Meta:
app_label = "djangae"
class ModelWithNullableCharField(models.Model):
field1 = models.CharField(max_length=500, null=True)
some_id = models.IntegerField(default=0)
class Meta:
app_label = "djangae"
class UniqueModel(models.Model):
unique_field = models.CharField(max_length=100, unique=True)
unique_combo_one = models.IntegerField(blank=True, default=0)
unique_combo_two = models.CharField(max_length=100, blank=True, default="")
unique_relation = models.ForeignKey('self', null=True, blank=True, unique=True)
unique_set_field = SetField(models.CharField(max_length=500), unique=True)
unique_list_field = ListField(models.CharField(max_length=500), unique=True)
unique_together_list_field = ListField(models.IntegerField())
class Meta:
unique_together = [
("unique_combo_one", "unique_combo_two"),
("unique_together_list_field", "unique_combo_one")
]
app_label = "djangae"
class UniqueModelWithLongPK(models.Model):
long_pk = models.CharField(max_length=500, primary_key=True)
unique_field = models.IntegerField(unique=True)
class IntegerModel(models.Model):
integer_field = models.IntegerField()
class Meta:
app_label = "djangae"
class TestFruit(models.Model):
name = models.CharField(primary_key=True, max_length=32)
origin = models.CharField(max_length=32, default="Unknown")
color = models.CharField(max_length=100)
is_mouldy = models.BooleanField(default=False)
text_field = models.TextField(blank=True, default="")
binary_field = models.BinaryField(blank=True)
class Meta:
ordering = ("color",)
app_label = "djangae"
def __unicode__(self):
return self.name
def __repr__(self):
return "<TestFruit: name={}, color={}>".format(self.name, self.color)
class Permission(models.Model):
user = models.ForeignKey(TestUser)
perm = models.CharField(max_length=32)
def __unicode__(self):
return u"{0} for {1}".format(self.perm, self.user)
class Meta:
ordering = ('user__username', 'perm')
app_label = "djangae"
class SelfRelatedModel(models.Model):
related = models.ForeignKey('self', blank=True, null=True)
class Meta:
app_label = "djangae"
class MultiTableParent(models.Model):
parent_field = models.CharField(max_length=32)
class Meta:
app_label = "djangae"
class MultiTableChildOne(MultiTableParent):
child_one_field = models.CharField(max_length=32)
class Meta:
app_label = "djangae"
class MultiTableChildTwo(MultiTableParent):
child_two_field = models.CharField(max_length=32)
class Meta:
app_label = "djangae"
class Relation(models.Model):
class Meta:
app_label = "djangae"
class Related(models.Model):
headline = models.CharField(max_length=500)
relation = models.ForeignKey(Relation)
class Meta:
app_label = "djangae"
class NullDate(models.Model):
date = models.DateField(null=True, default=None)
datetime = models.DateTimeField(null=True, default=None)
time = models.TimeField(null=True, default=None)
class Meta:
app_label = "djangae"
class NullDateSet(models.Model):
dates = RelatedSetField(NullDate, blank=True, unique=True)
class Meta:
app_label = "djangae"
class ModelWithUniques(models.Model):
name = models.CharField(max_length=64, unique=True)
class Meta:
app_label = "djangae"
class ModelWithUniquesOnForeignKey(models.Model):
name = models.CharField(max_length=64, unique=True)
related_name = models.ForeignKey(ModelWithUniques, unique=True)
class Meta:
unique_together = [("name", "related_name")]
app_label = "djangae"
class ModelWithDates(models.Model):
start = models.DateField()
end = models.DateField()
class Meta:
app_label = "djangae"
class ModelWithUniquesAndOverride(models.Model):
name = models.CharField(max_length=64, unique=True)
class Djangae:
disable_constraint_checks = False
class Meta:
app_label = "djangae"
class SpecialIndexesModel(models.Model):
name = models.CharField(max_length=255, primary_key=True)
nickname = CharField(blank=True)
sample_list = ListField(models.CharField)
def __unicode__(self):
return self.name
class Meta:
app_label = "djangae"
class DateTimeModel(models.Model):
datetime_field = models.DateTimeField(auto_now_add=True)
date_field = models.DateField(auto_now_add=True)
class Meta:
app_label = "djangae"
if django.VERSION >= (1, 8):
    # DurationField was only introduced in Django 1.8
class DurationModel(models.Model):
duration_field = models.DurationField()
duration_field_nullable = models.DurationField(blank=True, null=True)
class PaginatorModel(models.Model):
foo = models.IntegerField()
class Meta:
app_label = "djangae"
class BackendTests(TestCase):
def test_pk_gt_empty_returns_all(self):
for i in range(10):
TestFruit.objects.create(name=str(i), color=str(i))
self.assertEqual(10, TestFruit.objects.filter(pk__gt="").count())
self.assertEqual(10, TestFruit.objects.filter(pk__gte="").count())
self.assertEqual(0, TestFruit.objects.filter(pk__lt="").count())
self.assertEqual(0, TestFruit.objects.filter(pk__lte="").count())
def test_pk_gt_zero_returns_all(self):
IntegerModel.objects.create(pk=1, integer_field=1)
IntegerModel.objects.create(pk=2, integer_field=2)
results = IntegerModel.objects.filter(pk__gt=0)
self.assertEqual(2, len(results))
results = IntegerModel.objects.filter(pk__gte=0)
self.assertEqual(2, len(results))
results = IntegerModel.objects.filter(pk__lt=0)
self.assertEqual(0, len(results))
results = IntegerModel.objects.filter(pk__lte=0)
self.assertEqual(0, len(results))
def test_entity_matches_query(self):
entity = datastore.Entity("test_model")
entity["name"] = "Charlie"
entity["age"] = 22
query = datastore.Query("test_model")
query["name ="] = "Charlie"
self.assertTrue(entity_matches_query(entity, query))
query["age >="] = 5
self.assertTrue(entity_matches_query(entity, query))
del query["age >="]
query["age <"] = 22
self.assertFalse(entity_matches_query(entity, query))
del query["age <"]
query["age <="] = 22
self.assertTrue(entity_matches_query(entity, query))
del query["age <="]
query["name ="] = "Fred"
self.assertFalse(entity_matches_query(entity, query))
        # If the entity has a list field, the entity matches the query
        # if any of the list's values match
entity["name"] = [ "Bob", "Fred", "Dave" ]
self.assertTrue(entity_matches_query(entity, query)) # ListField test
def test_exclude_pks_with_slice(self):
for i in range(10):
TestFruit.objects.create(name=str(i), color=str(i))
to_exclude = [ str(x) for x in range(5) + range(15,20) ]
to_return = TestFruit.objects.exclude(pk__in=set(to_exclude)).values_list("pk", flat=True)[:2]
self.assertEqual(2, len(to_return))
qs = TestFruit.objects.filter(
pk__in=to_return
)
self.assertEqual(2, len(qs))
def test_count_on_excluded_pks(self):
TestFruit.objects.create(name="Apple", color="Red")
TestFruit.objects.create(name="Orange", color="Orange")
self.assertEqual(1, TestFruit.objects.filter(pk__in=["Apple", "Orange"]).exclude(pk__in=["Apple"]).count())
def test_defaults(self):
fruit = TestFruit.objects.create(name="Apple", color="Red")
self.assertEqual("Unknown", fruit.origin)
instance = datastore.Get(datastore.Key.from_path(TestFruit._meta.db_table, fruit.pk, namespace=DEFAULT_NAMESPACE))
del instance["origin"]
datastore.Put(instance)
fruit = TestFruit.objects.get()
self.assertIsNone(fruit.origin)
fruit.save()
fruit = TestFruit.objects.get()
self.assertEqual("Unknown", fruit.origin)
@disable_cache()
def test_get_by_keys(self):
colors = [ "Red", "Green", "Blue", "Yellow", "Orange" ]
fruits = [ TestFruit.objects.create(name=str(x), color=random.choice(colors)) for x in range(32) ]
# Check that projections work with key lookups
with sleuth.watch('google.appengine.api.datastore.Query.__init__') as query_init:
with sleuth.watch('google.appengine.api.datastore.Query.Ancestor') as query_anc:
TestFruit.objects.only("color").get(pk="0").color
self.assertEqual(query_init.calls[0].kwargs["projection"], ["color"])
# Make sure the query is an ancestor of the key
self.assertEqual(query_anc.calls[0].args[1], datastore.Key.from_path(TestFruit._meta.db_table, "0", namespace=DEFAULT_NAMESPACE))
# Now check projections work with fewer than 100 things
with sleuth.watch('djangae.db.backends.appengine.meta_queries.AsyncMultiQuery.__init__') as query_init:
with sleuth.watch('google.appengine.api.datastore.Query.Ancestor') as query_anc:
keys = [str(x) for x in range(32)]
results = list(TestFruit.objects.only("color").filter(pk__in=keys).order_by("name"))
self.assertEqual(query_init.call_count, 1) # One multiquery
self.assertEqual(query_anc.call_count, 32) # 32 Ancestor calls
self.assertEqual(len(query_init.calls[0].args[1]), 32)
# Confirm the ordering is correct
self.assertEqual(sorted(keys), [ x.pk for x in results ])
results = list(TestFruit.objects.only("color").filter(pk__in=keys).order_by("name")[5:10])
self.assertEqual(len(results), 5)
self.assertEqual([x.pk for x in results], sorted(keys)[5:10])
# Make sure we can do a normal (non-projection) get by keys
self.assertItemsEqual(TestFruit.objects.filter(pk__in=keys), fruits)
def test_get_or_create(self):
"""
Django's get_or_create can do the following:
1. get(**lookup) -> throws DoesNotExist
2. Catches DoesNotExist
3. create() -> throws IntegrityError
4. get(**lookup)
This test proves that we throw the right kind of error at step 3 when
unique constraints are violated.
"""
def wrap_get(func):
def _wrapped(*args, **kwargs):
try:
if _wrapped.calls == 0:
raise UniqueModel.DoesNotExist()
else:
return func(*args, **kwargs)
finally:
_wrapped.calls += 1
_wrapped.calls = 0
return _wrapped
from django.db.models import query
wrapped_get = wrap_get(query.QuerySet.get)
UniqueModel.objects.create(unique_field="Test")
with disable_cache():
with sleuth.switch("django.db.models.query.QuerySet.get", wrapped_get):
instance, created = UniqueModel.objects.get_or_create(unique_field="Test")
self.assertFalse(created)
def test_setting_non_null_null_throws_integrity_error(self):
with self.assertRaises(IntegrityError):
IntegerModel.objects.create(integer_field=None)
with self.assertRaises(IntegrityError):
instance = IntegerModel()
instance.integer_field = None
instance.save()
with self.assertRaises(IntegrityError):
instance = IntegerModel.objects.create(integer_field=1)
instance = IntegerModel.objects.get()
instance.integer_field = None
instance.save()
def test_normalise_field_value(self):
self.assertEqual(u'0000475231073257', normalise_field_value(decimal.Decimal(475231073257)))
self.assertEqual(u'-0000475231073257', normalise_field_value(decimal.Decimal(-475231073257)))
self.assertEqual(u'0000000004752311', normalise_field_value(decimal.Decimal(4752310.73257)))
self.assertEqual(u'0000004752310733', normalise_field_value(decimal.Decimal(4752310732.57)))
self.assertEqual(datetime.datetime(2015, 1, 27, 2, 46, 8, 584258), normalise_field_value(datetime.datetime(2015, 1, 27, 2, 46, 8, 584258)))
def test_decimal_to_string(self):
self.assertEqual(u'0002312487812767', decimal_to_string(decimal.Decimal(2312487812767)))
self.assertEqual(u'-0002312487812767', decimal_to_string(decimal.Decimal(-2312487812767)))
self.assertEqual(u'002312487812', decimal_to_string(decimal.Decimal(2312487812), 12))
self.assertEqual(u'002387812.320', decimal_to_string(decimal.Decimal(2387812.32), 12, 3))
self.assertEqual(u'-002387812.513', decimal_to_string(decimal.Decimal(-2387812.513212), 12, 3))
self.assertEqual(u'0237812.000', decimal_to_string(decimal.Decimal(237812), 10, 3))
self.assertEqual(u'-0237812.210', decimal_to_string(decimal.Decimal(-237812.21), 10, 3))
def test_gae_conversion(self):
# A PK IN query should result in a single get by key
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Get", lambda *args, **kwargs: []) as get_mock:
list(TestUser.objects.filter(pk__in=[1, 2, 3])) # Force the query to run
self.assertEqual(1, get_mock.call_count)
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Query.Run", lambda *args, **kwargs: []) as query_mock:
list(TestUser.objects.filter(username="test"))
self.assertEqual(1, query_mock.call_count)
with sleuth.switch("djangae.db.backends.appengine.meta_queries.AsyncMultiQuery.Run", lambda *args, **kwargs: []) as query_mock:
list(TestUser.objects.filter(username__in=["test", "cheese"]))
self.assertEqual(1, query_mock.call_count)
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Get", lambda *args, **kwargs: []) as get_mock:
list(TestUser.objects.filter(pk=1))
self.assertEqual(1, get_mock.call_count)
with sleuth.switch("djangae.db.backends.appengine.meta_queries.AsyncMultiQuery.Run", lambda *args, **kwargs: []) as query_mock:
list(TestUser.objects.exclude(username__startswith="test"))
self.assertEqual(1, query_mock.call_count)
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Get", lambda *args, **kwargs: []) as get_mock:
list(TestUser.objects.filter(pk__in=[1, 2, 3, 4, 5, 6, 7, 8]).
filter(username__in=["test", "test2", "test3"]).filter(email__in=["[email protected]", "[email protected]"]))
self.assertEqual(1, get_mock.call_count)
def test_gae_query_display(self):
# Shouldn't raise any exceptions:
representation = str(TestUser.objects.filter(username='test').query)
self.assertTrue('test' in representation)
self.assertTrue('username' in representation)
def test_range_behaviour(self):
IntegerModel.objects.create(integer_field=5)
IntegerModel.objects.create(integer_field=10)
IntegerModel.objects.create(integer_field=15)
self.assertItemsEqual([10], IntegerModel.objects.filter(integer_field__range=(6, 14)).values_list("integer_field", flat=True))
self.assertItemsEqual([5, 10, 15], IntegerModel.objects.filter(integer_field__range=(5, 15)).order_by("integer_field").values_list("integer_field", flat=True))
self.assertItemsEqual([5, 15], IntegerModel.objects.exclude(integer_field__range=(6, 14)).values_list("integer_field", flat=True))
def test_exclude_nullable_field(self):
instance = ModelWithNullableCharField.objects.create(some_id=999) # Create a nullable thing
ModelWithNullableCharField.objects.create(some_id=999, field1="test") # Create a nullable thing
self.assertItemsEqual([instance], ModelWithNullableCharField.objects.filter(some_id=999).exclude(field1="test").all())
instance.field1 = "bananas"
instance.save()
self.assertEqual(instance, ModelWithNullableCharField.objects.filter(some_id=999).exclude(field1="test")[0])
def test_null_date_field(self):
null_date = NullDate()
null_date.save()
null_date = NullDate.objects.get()
self.assertIsNone(null_date.date)
self.assertIsNone(null_date.time)
self.assertIsNone(null_date.datetime)
def test_convert_unicode_subclasses_to_unicode(self):
# The App Engine SDK raises BadValueError if you try saving a SafeText
# string to a CharField. Djangae explicitly converts it to unicode.
grue = SafeText(u'grue')
self.assertIsInstance(grue, unicode)
self.assertNotEqual(type(grue), unicode)
obj = TestFruit.objects.create(name=u'foo', color=grue)
obj = TestFruit.objects.get(pk=obj.pk)
self.assertEqual(type(obj.color), unicode)
obj = TestFruit.objects.filter(color=grue)[0]
self.assertEqual(type(obj.color), unicode)
def test_notsupportederror_thrown_on_too_many_inequalities(self):
TestFruit.objects.create(name="Apple", color="Green", origin="England")
pear = TestFruit.objects.create(name="Pear", color="Green")
banana = TestFruit.objects.create(name="Banana", color="Yellow")
# Excluding one field is fine
self.assertItemsEqual([pear, banana], list(TestFruit.objects.exclude(name="Apple")))
# Excluding a field, and doing a > or < on another is not so fine
with self.assertRaises(NotSupportedError):
self.assertEqual(pear, TestFruit.objects.exclude(origin="England").filter(color__lt="Yellow").get())
# Same with excluding two fields
with self.assertRaises(NotSupportedError):
list(TestFruit.objects.exclude(origin="England").exclude(color="Yellow"))
# But apparently excluding the same field twice is OK
self.assertItemsEqual([banana], list(TestFruit.objects.exclude(origin="England").exclude(name="Pear").order_by("origin")))
# And apparently having both a __gt and a __lt filter on the same field is also fine
self.assertItemsEqual([banana], list(TestFruit.objects.order_by().filter(name__lt="Pear", name__gt="Apple")))
def test_excluding_pks_is_emulated(self):
apple = TestFruit.objects.create(name="Apple", color="Green", is_mouldy=True, origin="England")
banana = TestFruit.objects.create(name="Banana", color="Yellow", is_mouldy=True, origin="Dominican Republic")
cherry = TestFruit.objects.create(name="Cherry", color="Red", is_mouldy=True, origin="Germany")
pear = TestFruit.objects.create(name="Pear", color="Green", origin="England")
self.assertEqual([apple, pear], list(TestFruit.objects.filter(origin__lt="Germany").exclude(pk=banana.pk).exclude(pk=cherry.pk).order_by("origin")))
self.assertEqual([apple, cherry], list(TestFruit.objects.exclude(origin="Dominican Republic").exclude(pk=pear.pk).order_by("origin")))
self.assertEqual([], list(TestFruit.objects.filter(is_mouldy=True).filter(color="Green", origin__gt="England").exclude(pk=pear.pk).order_by("-origin")))
self.assertEqual([cherry, banana], list(TestFruit.objects.exclude(pk=pear.pk).order_by("-name")[:2]))
self.assertEqual([banana, apple], list(TestFruit.objects.exclude(pk=pear.pk).order_by("origin", "name")[:2]))
def test_datetime_fields(self):
date = datetime.datetime.today()
dt = datetime.datetime.now()
time = datetime.time(0,0,0)
        # check that creating objects works
obj = NullDate.objects.create(date=date, datetime=dt, time=time)
        # check that filtering objects works
self.assertItemsEqual([obj], NullDate.objects.filter(datetime=dt))
self.assertItemsEqual([obj], NullDate.objects.filter(date=date))
self.assertItemsEqual([obj], NullDate.objects.filter(time=time))
        # check that updating objects works
obj.date = date + datetime.timedelta(days=1)
obj.datetime = dt + datetime.timedelta(days=1)
obj.time = datetime.time(23,0,0)
obj.save()
self.assertItemsEqual([obj], NullDate.objects.filter(datetime=obj.datetime))
self.assertItemsEqual([obj], NullDate.objects.filter(date=obj.date))
self.assertItemsEqual([obj], NullDate.objects.filter(time=obj.time))
def test_related_datetime_nullable(self):
date = datetime.datetime.today()
dt = datetime.datetime.now()
time = datetime.time(0,0,0)
date_set = NullDateSet.objects.create()
empty_obj = NullDate.objects.create(date=None, datetime=None, time=None)
date_set.dates.add(empty_obj)
obj = NullDate.objects.create(date=date, datetime=dt, time=time)
date_set.dates.add(obj)
date_set.save()
# check if filtering/excluding of None works in RelatedSetField
self.assertItemsEqual([obj], date_set.dates.filter(datetime__isnull=False))
self.assertItemsEqual([obj], date_set.dates.filter(date__isnull=False))
self.assertItemsEqual([obj], date_set.dates.filter(time__isnull=False))
self.assertItemsEqual([obj], date_set.dates.exclude(datetime=None))
self.assertItemsEqual([obj], date_set.dates.exclude(date=None))
self.assertItemsEqual([obj], date_set.dates.exclude(time=None))
# sorting should work too
self.assertItemsEqual([obj, empty_obj], date_set.dates.order_by('datetime'))
self.assertItemsEqual([empty_obj, obj], date_set.dates.order_by('-datetime'))
self.assertItemsEqual([obj, empty_obj], date_set.dates.order_by('date'))
self.assertItemsEqual([empty_obj, obj], date_set.dates.order_by('-date'))
self.assertItemsEqual([obj, empty_obj], date_set.dates.order_by('time'))
self.assertItemsEqual([empty_obj, obj], date_set.dates.order_by('-time'))
def test_update_with_f_expr(self):
i = IntegerModel.objects.create(integer_field=1000)
qs = IntegerModel.objects.all()
qs.update(integer_field=models.F('integer_field') + 1)
self.assertRaises(IntegerModel.DoesNotExist, IntegerModel.objects.get, integer_field=1000)
i = IntegerModel.objects.get(pk=i.pk)
self.assertEqual(1001, i.integer_field)
def test_save_with_f_expr(self):
i = IntegerModel.objects.create(integer_field=1000)
i.integer_field = models.F('integer_field') + 1
i.save()
self.assertRaises(IntegerModel.DoesNotExist, IntegerModel.objects.get, integer_field=1000)
i = IntegerModel.objects.get(pk=i.pk)
self.assertEqual(1001, i.integer_field)
def test_ordering_by_scatter_property(self):
try:
list(TestFruit.objects.order_by("__scatter__"))
except:
logging.exception("Error sorting on __scatter__")
self.fail("Unable to sort on __scatter__ property like we should")
def test_ordering_on_non_indexed_fields_not_supported(self):
self.assertRaises(NotSupportedError, list, TestFruit.objects.order_by("text_field"))
self.assertRaises(NotSupportedError, list, TestFruit.objects.order_by("binary_field"))
def test_ordering_on_sparse_field(self):
"""
        Case when sorting on a field that is not present in all Datastore
        entities. That can easily happen when you add a new field to a model
        and do not re-save all of the existing entities.
"""
# Clean state
self.assertEqual(TestFruit.objects.count(), 0)
        # Put consistent instances into the Datastore
TestFruit.objects.create(name='a', color='a')
TestFruit.objects.create(name='b', color='b')
        # Put inconsistent instances into the Datastore.
        # The color field is missing entirely (not even None); we need more
        # than one such entity so that all sorting branches are exercised.
values = {'name': 'c'}
entity = datastore.Entity(TestFruit._meta.db_table, namespace=DEFAULT_NAMESPACE, **values)
entity.update(values)
datastore.Put(entity)
values = {'name': 'd'}
entity = datastore.Entity(TestFruit._meta.db_table, namespace=DEFAULT_NAMESPACE, **values)
entity.update(values)
datastore.Put(entity)
# Ok, we can get all 4 instances
self.assertEqual(TestFruit.objects.count(), 4)
# Sorted list. No exception should be raised
# (esp KeyError from django_ordering_comparison)
with sleuth.watch('djangae.db.backends.appengine.commands.utils.django_ordering_comparison') as compare:
all_names = ['a', 'b', 'c', 'd']
fruits = list(
TestFruit.objects.filter(name__in=all_names).order_by('color', 'name')
)
        # Make sure the troubled code path was actually triggered
        # (with .all() it isn't)
self.assertTrue(compare.called)
        # Test the ordering of the results. The entities with a missing color
        # should come back first (sorted as None), and within those they
        # should be ordered by name.
expected_fruits = [
('c', None), ('d', None), ('a', 'a'), ('b', 'b'),
]
self.assertEqual(
[(fruit.name, fruit.color) for fruit in fruits],
expected_fruits,
)
def test_update_query_does_not_update_entities_which_no_longer_match_query(self):
""" When doing queryset.update(field=x), any entities which the query returns but which no
longer match the query (due to eventual consistency) should not be altered.
"""
obj = TestFruit.objects.create(name='apple', color='green', is_mouldy=False)
with inconsistent_db(probability=0):
# alter our object, so that it should no longer match the query that we then do
obj.color = 'blue'
obj.save()
# Now run a query, our object is changed, but the inconsistency means it will still match
queryset = TestFruit.objects.filter(color='green')
assert queryset.count(), "inconsistent_db context manager isn't working" # sanity
# Now run an update with that query, the update should NOT be applied, because it
# should re-check that the object still matches the query
queryset.update(is_mouldy=True)
obj = TestFruit.objects.get(pk=obj.pk)
self.assertFalse(obj.is_mouldy)
    @skipIf(django.VERSION < (1, 8), "DurationField is only available in Django 1.8+")
def test_duration_field_stored_as_float(self):
""" See issue #512. We have a bug report that the DurationField comes back as None when
the value is set to a particular value which is roughly 3 days. This is caused by it
being stored as a float instead of an int in the DB.
"""
td2 = datetime.timedelta(days=2)
# If the duration value is stored as a float instead of an int then this particular duration
# will cause django.db.backends.base.operations.BaseDatabaseOperations.convert_durationfield_value
# to return the value as None
td3 = datetime.timedelta(days=3, seconds=14658, microseconds=886540)
durations_as_2 = DurationModel.objects.create(
duration_field=td2,
duration_field_nullable=td2
)
durations_as_3 = DurationModel.objects.create(
duration_field=td3,
duration_field_nullable=td3
)
self.assertEqual(durations_as_2.duration_field, td2)
self.assertEqual(durations_as_2.duration_field_nullable, td2)
self.assertEqual(durations_as_3.duration_field, td3)
self.assertEqual(durations_as_3.duration_field_nullable, td3)
durations_as_2 = DurationModel.objects.get(pk=durations_as_2.pk)
durations_as_3 = DurationModel.objects.get(pk=durations_as_3.pk)
self.assertEqual(durations_as_2.duration_field, td2)
self.assertEqual(durations_as_2.duration_field_nullable, td2)
self.assertEqual(durations_as_3.duration_field, td3)
self.assertEqual(durations_as_3.duration_field_nullable, td3)
# And just for good measure, check the raw value in the datastore
key = datastore.Key.from_path(DurationModel._meta.db_table, durations_as_3.pk, namespace=DEFAULT_NAMESPACE)
entity = datastore.Get(key)
self.assertTrue(isinstance(entity['duration_field'], (int, long)))
def test_datetime_and_time_fields_precision_for_projection_queries(self):
"""
The returned datetime and time values should include microseconds.
See issue #707.
"""
t = datetime.time(22, 13, 50, 541022)
dt = datetime.datetime(2016, 5, 27, 18, 40, 12, 927371)
NullDate.objects.create(time=t, datetime=dt)
result = NullDate.objects.all().values_list('time', 'datetime')
expected = [(t, dt)]
self.assertItemsEqual(result, expected)
def test_filter_with_empty_q(self):
t1 = TestUser.objects.create(username='foo', field2='bar')
condition = Q() | Q(username='foo')
self.assertEqual(t1, TestUser.objects.filter(condition).first())
condition = Q()
self.assertEqual(t1, TestUser.objects.filter(condition).first())
def test_only_defer_does_project(self):
with sleuth.watch("google.appengine.api.datastore.Query.__init__") as watcher:
list(TestUser.objects.only("pk").all())
self.assertTrue(watcher.calls[0].kwargs["keys_only"])
self.assertFalse(watcher.calls[0].kwargs["projection"])
with sleuth.watch("google.appengine.api.datastore.Query.__init__") as watcher:
list(TestUser.objects.values("pk"))
self.assertTrue(watcher.calls[0].kwargs["keys_only"])
self.assertFalse(watcher.calls[0].kwargs["projection"])
with sleuth.watch("google.appengine.api.datastore.Query.__init__") as watcher:
list(TestUser.objects.only("username").all())
self.assertFalse(watcher.calls[0].kwargs["keys_only"])
self.assertItemsEqual(watcher.calls[0].kwargs["projection"], ["username"])
with sleuth.watch("google.appengine.api.datastore.Query.__init__") as watcher:
list(TestUser.objects.defer("username").all())
self.assertFalse(watcher.calls[0].kwargs["keys_only"])
self.assertTrue(watcher.calls[0].kwargs["projection"])
self.assertFalse("username" in watcher.calls[0].kwargs["projection"])
def test_chaining_none_filter(self):
t1 = TestUser.objects.create()
self.assertFalse(TestUser.objects.none().filter(pk=t1.pk))
class ModelFormsetTest(TestCase):
def test_reproduce_index_error(self):
class TestModelForm(ModelForm):
class Meta:
model = TestUser
fields = ("username", "email", "field2")
test_model = TestUser.objects.create(username='foo', field2='bar')
TestModelFormSet = modelformset_factory(TestUser, form=TestModelForm, extra=0)
TestModelFormSet(queryset=TestUser.objects.filter(pk=test_model.pk))
data = {
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': 0,
'form-TOTAL_FORMS': 0,
'form-0-id': test_model.id,
'form-0-field1': 'foo_1',
'form-0-field2': 'bar_1',
}
factory = RequestFactory()
request = factory.post('/', data=data)
TestModelFormSet(request.POST, request.FILES)
class CacheTests(TestCase):
def test_cache_set(self):
cache.set('test?', 'yes!')
self.assertEqual(cache.get('test?'), 'yes!')
def test_cache_timeout(self):
cache.set('test?', 'yes!', 1)
import time
time.sleep(1)
self.assertEqual(cache.get('test?'), None)
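# Helper: compare two collections of UniqueMarker entities by (key, instance),
# ignoring ordering.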
def compare_markers(list1, list2):
return (
sorted([(x.key(), x.instance) for x in list1]) == sorted([(x.key(), x.instance) for x in list2])
)
class ConstraintTests(TestCase):
"""
Tests for unique constraint handling
"""
def test_transaction_failure_to_apply(self):
"""
This test simulates a failure to apply a transaction when saving an
entity. The mocked function allows independent transactions to work
normally so that we are testing what happens when markers can be created
(which use independent transactions) but the outer transaction fails
"""
original_commit = datastore_rpc.TransactionalConnection.commit
def fake_commit(self, *args, **kwargs):
config = self._BaseConnection__config
# Do the normal thing on the constraint's independent transaction, but
# fail otherwise
if config.propagation == datastore_rpc.TransactionOptions.INDEPENDENT:
return original_commit(self, *args, **kwargs)
return False
initial_constraints = list(UniqueMarker.all())
with sleuth.switch('google.appengine.datastore.datastore_rpc.TransactionalConnection.commit', fake_commit) as commit:
self.assertRaises(TransactionFailedError, ModelWithUniques.objects.create, name="One")
self.assertTrue(commit.called)
# Constraints should be the same
self.assertTrue(compare_markers(initial_constraints, UniqueMarker.all()))
instance = ModelWithUniques.objects.create(name="One")
initial_constraints = list(UniqueMarker.all())
with sleuth.switch('google.appengine.datastore.datastore_rpc.TransactionalConnection.commit', fake_commit) as commit:
instance.name = "Two"
self.assertRaises(TransactionFailedError, instance.save)
self.assertTrue(commit.called)
# Constraints should be the same
self.assertTrue(compare_markers(initial_constraints, UniqueMarker.all()))
def test_marker_creation_transaction_failure(self):
"""
This test simulates a failure to apply a transaction when saving an
        entity. The mocked function prevents independent transactions from working,
        meaning that markers can't be acquired or released. This should force
        any outer transaction to roll back.
"""
original_commit = datastore_rpc.TransactionalConnection.commit
def fake_commit(self, *args, **kwargs):
config = self._BaseConnection__config
# Blow up on independent transactions
if config.propagation != datastore_rpc.TransactionOptions.INDEPENDENT:
return original_commit(self, *args, **kwargs)
return False
initial_constraints = list(UniqueMarker.all())
with sleuth.switch('google.appengine.datastore.datastore_rpc.TransactionalConnection.commit', fake_commit) as commit:
self.assertRaises(TransactionFailedError, ModelWithUniques.objects.create, name="One")
self.assertTrue(commit.called)
# Constraints should be the same
self.assertTrue(compare_markers(initial_constraints, UniqueMarker.all()))
self.assertRaises(ModelWithUniques.DoesNotExist, ModelWithUniques.objects.get, name="One")
instance = ModelWithUniques.objects.create(name="One")
initial_constraints = list(UniqueMarker.all())
with sleuth.switch('google.appengine.datastore.datastore_rpc.TransactionalConnection.commit', fake_commit) as commit:
instance.name = "Two"
self.assertRaises(TransactionFailedError, instance.save)
self.assertTrue(commit.called)
# Constraints should be the same
self.assertTrue(compare_markers(initial_constraints, UniqueMarker.all()))
self.assertRaises(ModelWithUniques.DoesNotExist, ModelWithUniques.objects.get, name="Two")
self.assertEqual(instance, ModelWithUniques.objects.get(name="One"))
def test_update_updates_markers(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
instance = ModelWithUniques.objects.create(name="One")
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
qry = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE)
qry.Order(("created", datastore.Query.DESCENDING))
marker = [x for x in qry.Run()][0]
# Make sure we assigned the instance
self.assertEqual(
marker["instance"],
datastore.Key.from_path(instance._meta.db_table, instance.pk, namespace=DEFAULT_NAMESPACE)
)
expected_marker = "{}|name:{}".format(ModelWithUniques._meta.db_table, md5("One").hexdigest())
self.assertEqual(expected_marker, marker.key().id_or_name())
instance.name = "Two"
instance.save()
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
marker = [x for x in qry.Run()][0]
# Make sure we assigned the instance
self.assertEqual(
marker["instance"],
datastore.Key.from_path(instance._meta.db_table, instance.pk, namespace=DEFAULT_NAMESPACE)
)
expected_marker = "{}|name:{}".format(ModelWithUniques._meta.db_table, md5("Two").hexdigest())
self.assertEqual(expected_marker, marker.key().id_or_name())
def test_conflicting_insert_throws_integrity_error(self):
try:
constraints.UNOWNED_MARKER_TIMEOUT_IN_SECONDS = 0
ModelWithUniques.objects.create(name="One")
with self.assertRaises(IntegrityError):
ModelWithUniques.objects.create(name="One")
# An insert with a specified ID enters a different code path
# so we need to ensure it works
ModelWithUniques.objects.create(id=555, name="Two")
with self.assertRaises(IntegrityError):
ModelWithUniques.objects.create(name="Two")
# Make sure that bulk create works properly
ModelWithUniques.objects.bulk_create([
ModelWithUniques(name="Three"),
ModelWithUniques(name="Four"),
ModelWithUniques(name="Five"),
])
with self.assertRaises(IntegrityError):
ModelWithUniques.objects.create(name="Four")
with self.assertRaises(NotSupportedError):
# Make sure bulk creates are limited when there are unique constraints
# involved
ModelWithUniques.objects.bulk_create(
[ ModelWithUniques(name=str(x)) for x in range(26) ]
)
finally:
constraints.UNOWNED_MARKER_TIMEOUT_IN_SECONDS = 5
def test_integrity_error_message_correct(self):
""" Check that the IntegrityError messages mentions the correct field(s). """
# Create a conflict on `unique_field`
obj1 = UniqueModel.objects.create(unique_field="One")
try:
UniqueModel.objects.create(unique_field="One", unique_combo_one=1)
except IntegrityError as e:
self.assertTrue("unique_field" in unicode(e))
# Create a conflict on `unique_relation`
UniqueModel.objects.create(unique_relation=obj1, unique_field="two", unique_combo_one=2)
try:
UniqueModel.objects.create(unique_relation=obj1, unique_field="three", unique_combo_one=3)
except IntegrityError as e:
self.assertTrue("unique_relation" in unicode(e))
        # Create a conflict on a `unique_together` combo
UniqueModel.objects.create(unique_field="four", unique_combo_one=4, unique_combo_two="five")
try:
UniqueModel.objects.create(unique_field="five", unique_combo_one=4, unique_combo_two="five")
except IntegrityError as e:
self.assertTrue("unique_combo_one" in unicode(e))
self.assertTrue("unique_combo_two" in unicode(e))
def test_table_flush_clears_markers_for_that_table(self):
ModelWithUniques.objects.create(name="One")
UniqueModel.objects.create(unique_field="One")
FlushCommand(ModelWithUniques._meta.db_table, default_connection).execute()
ModelWithUniques.objects.create(name="One")
with self.assertRaises(IntegrityError):
UniqueModel.objects.create(unique_field="One")
def test_recently_deleted_unique_doesnt_come_back(self):
instance = ModelWithUniques.objects.create(name="One")
with inconsistent_db():
instance.delete()
self.assertEqual(0, ModelWithUniques.objects.filter(name="One").count())
self.assertFalse(ModelWithUniques.objects.filter(name="One").exists())
self.assertFalse(list(ModelWithUniques.objects.all())) # Triple-check
def test_conflicting_update_throws_integrity_error(self):
ModelWithUniques.objects.create(name="One")
instance = ModelWithUniques.objects.create(name="Two")
with self.assertRaises(IntegrityError):
instance.name = "One"
instance.save()
def test_existing_marker_replaced_if_nonexistent_instance(self):
stale_instance = ModelWithUniques.objects.create(name="One")
# Delete the entity without updating the markers
key = datastore.Key.from_path(ModelWithUniques._meta.db_table, stale_instance.pk, namespace=DEFAULT_NAMESPACE)
datastore.Delete(key)
ModelWithUniques.objects.create(name="One") # Should be fine
with self.assertRaises(IntegrityError):
ModelWithUniques.objects.create(name="One")
def test_unique_combinations_are_returned_correctly(self):
combos_one = _unique_combinations(ModelWithUniquesOnForeignKey, ignore_pk=True)
combos_two = _unique_combinations(ModelWithUniquesOnForeignKey, ignore_pk=False)
self.assertEqual([['name', 'related_name'], ['name'], ['related_name']], combos_one)
self.assertEqual([['name', 'related_name'], ['id'], ['name'], ['related_name']], combos_two)
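        # A minimal stand-in for a datastore entity: a dict that also exposes a
        # key(), which is all unique_identifiers_from_entity needs here.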
class Entity(dict):
def __init__(self, model, id):
self._key = datastore.Key.from_path(model, id, namespace=DEFAULT_NAMESPACE)
def key(self):
return self._key
e1 = Entity(ModelWithUniquesOnForeignKey._meta.db_table, 1)
e1["name"] = "One"
e1["related_name_id"] = 1
ids_one = unique_identifiers_from_entity(ModelWithUniquesOnForeignKey, e1)
self.assertItemsEqual([
u'djangae_modelwithuniquesonforeignkey|id:1',
u'djangae_modelwithuniquesonforeignkey|name:06c2cea18679d64399783748fa367bdd',
u'djangae_modelwithuniquesonforeignkey|related_name_id:1',
u'djangae_modelwithuniquesonforeignkey|name:06c2cea18679d64399783748fa367bdd|related_name_id:1'
], ids_one)
def test_error_on_update_doesnt_change_markers(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
instance = ModelWithUniques.objects.create(name="One")
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
qry = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE)
qry.Order(("created", datastore.Query.DESCENDING))
marker = [x for x in qry.Run()][0]
# Make sure we assigned the instance
self.assertEqual(
marker["instance"],
datastore.Key.from_path(instance._meta.db_table, instance.pk, namespace=DEFAULT_NAMESPACE)
)
expected_marker = "{}|name:{}".format(ModelWithUniques._meta.db_table, md5("One").hexdigest())
self.assertEqual(expected_marker, marker.key().id_or_name())
instance.name = "Two"
def wrapped_put(*args, **kwargs):
kind = args[0][0].kind() if isinstance(args[0], list) else args[0].kind()
if kind != UniqueMarker.kind():
raise AssertionError()
return datastore.Put(*args, **kwargs)
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Put", wrapped_put):
with self.assertRaises(Exception):
instance.save()
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
marker = [x for x in qry.Run()][0]
# Make sure we assigned the instance
self.assertEqual(
marker["instance"],
datastore.Key.from_path(instance._meta.db_table, instance.pk, namespace=DEFAULT_NAMESPACE)
)
expected_marker = "{}|name:{}".format(ModelWithUniques._meta.db_table, md5("One").hexdigest())
self.assertEqual(expected_marker, marker.key().id_or_name())
def test_error_on_insert_doesnt_create_markers(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
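        # Same trick as above: allow the UniqueMarker puts, fail the model's own put.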
def wrapped_put(*args, **kwargs):
kind = args[0][0].kind() if isinstance(args[0], list) else args[0].kind()
if kind != UniqueMarker.kind():
raise AssertionError()
return datastore.Put(*args, **kwargs)
with sleuth.switch("djangae.db.backends.appengine.commands.datastore.Put", wrapped_put):
with self.assertRaises(Exception):
ModelWithUniques.objects.create(name="One")
self.assertEqual(
0,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
def test_delete_clears_markers(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
instance = ModelWithUniques.objects.create(name="One")
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
instance.delete()
self.assertEqual(
0,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
@override_settings(DJANGAE_DISABLE_CONSTRAINT_CHECKS=True)
def test_constraints_disabled_doesnt_create_or_check_markers(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
instance1 = ModelWithUniques.objects.create(name="One")
self.assertEqual(
initial_count,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
)
instance2 = ModelWithUniques.objects.create(name="One")
self.assertEqual(instance1.name, instance2.name)
self.assertFalse(instance1 == instance2)
@override_settings(DJANGAE_DISABLE_CONSTRAINT_CHECKS=True)
def test_constraints_can_be_enabled_per_model(self):
initial_count = datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count()
ModelWithUniquesAndOverride.objects.create(name="One")
self.assertEqual(
1,
datastore.Query(UniqueMarker.kind(), namespace=DEFAULT_NAMESPACE).Count() - initial_count
)
    def test_list_field_unique_constraints(self):
instance1 = UniqueModel.objects.create(unique_field=1, unique_combo_one=1, unique_list_field=["A", "C"])
with self.assertRaises((IntegrityError, DataError)):
UniqueModel.objects.create(unique_field=2, unique_combo_one=2, unique_list_field=["A"])
instance2 = UniqueModel.objects.create(unique_field=2, unique_combo_one=2, unique_list_field=["B"])
instance2.unique_list_field = instance1.unique_list_field
with self.assertRaises((IntegrityError, DataError)):
instance2.save()
instance1.unique_list_field = []
instance1.save()
instance2.save()
def test_list_field_unique_constraints_validation(self):
instance1 = UniqueModel(
unique_set_field={"A"},
unique_together_list_field=[1],
unique_field=1,
unique_combo_one=1,
unique_list_field=["A", "C"]
)
# Without a custom mixin, Django can't construct a unique validation query for a list field
self.assertRaises(ValueError, instance1.full_clean)
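        # Dynamically inject UniquenessMixin so that full_clean can build a valid
        # uniqueness check for the list/set fields (restored to models.Model below).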
UniqueModel.__bases__ = (UniquenessMixin,) + UniqueModel.__bases__
instance1.full_clean()
instance1.save()
        # Check that the uniqueness mixin works with long lists
instance1.unique_list_field = [ x for x in range(31) ]
try:
instance1.full_clean()
except NotSupportedError:
self.fail("Couldn't run unique check on long list field")
return
instance2 = UniqueModel(
unique_set_field={"B"},
unique_together_list_field=[2],
unique_field=2,
unique_combo_one=2,
unique_list_field=["B", "C"] # duplicate value C!
)
self.assertRaises(ValidationError, instance2.full_clean)
UniqueModel.__bases__ = (models.Model,)
def test_set_field_unique_constraints(self):
instance1 = UniqueModel.objects.create(unique_field=1, unique_combo_one=1, unique_set_field={"A", "C"})
with self.assertRaises((IntegrityError, DataError)):
UniqueModel.objects.create(unique_field=2, unique_combo_one=2, unique_set_field={"A"})
instance2 = UniqueModel.objects.create(unique_field=2, unique_combo_one=2, unique_set_field={"B"})
instance2.unique_set_field = instance1.unique_set_field
with self.assertRaises((IntegrityError, DataError)):
instance2.save()
instance1.unique_set_field = set()
instance1.save()
instance2.save()
instance2.unique_set_field = set()
instance2.save() # You can have two fields with empty sets
def test_unique_constraints_on_model_with_long_str_pk(self):
""" Check that an object with a string-based PK of 500 characters (the max that GAE allows)
can still have unique constraints pointing at it. (See #242.)
"""
obj = UniqueModelWithLongPK(pk="x" * 500, unique_field=1)
obj.save()
duplicate = UniqueModelWithLongPK(pk="y" * 500, unique_field=1)
self.assertRaises(IntegrityError, duplicate.save)
class EdgeCaseTests(TestCase):
def setUp(self):
super(EdgeCaseTests, self).setUp()
add_special_index(TestUser, "username", IExactIndexer(), "iexact")
self.u1 = TestUser.objects.create(username="A", email="[email protected]", last_login=datetime.datetime.now().date(), id=1)
self.u2 = TestUser.objects.create(username="B", email="[email protected]", last_login=datetime.datetime.now().date(), id=2)
self.u3 = TestUser.objects.create(username="C", email="[email protected]", last_login=datetime.datetime.now().date(), id=3)
self.u4 = TestUser.objects.create(username="D", email="[email protected]", last_login=datetime.datetime.now().date(), id=4)
self.u5 = TestUser.objects.create(username="E", email="[email protected]", last_login=datetime.datetime.now().date(), id=5)
self.apple = TestFruit.objects.create(name="apple", color="red")
self.banana = TestFruit.objects.create(name="banana", color="yellow")
def test_querying_by_date(self):
instance1 = ModelWithDates.objects.create(start=datetime.date(2014, 1, 1), end=datetime.date(2014, 1, 20))
instance2 = ModelWithDates.objects.create(start=datetime.date(2014, 2, 1), end=datetime.date(2014, 2, 20))
self.assertEqual(instance1, ModelWithDates.objects.get(start__lt=datetime.date(2014, 1, 2)))
self.assertEqual(2, ModelWithDates.objects.filter(start__lt=datetime.date(2015, 1, 1)).count())
self.assertEqual(instance2, ModelWithDates.objects.get(start__gt=datetime.date(2014, 1, 2)))
self.assertEqual(instance2, ModelWithDates.objects.get(start__gte=datetime.date(2014, 2, 1)))
def projection_plus_keys_filtering(self):
"""
If you do a query like this:
MyModel.objects.filter(pk__in=[1, 2]).filter(field1="Bananas").values_list("id", "someotherfield")
Then a projection query is run. The problem is that the entities returned only include "id" and "someotherfield"
but not "field1". Our entity-matches-query code should not run in this situation as we pass
all filters to the ancestor queries and so any entities returned should match.
"""
user = TestUser.objects.create(username="test", email="[email protected]")
self.assertItemsEqual(
[(user.pk, user.username)],
TestUser.objects.filter(
pk__in=[user.pk, user.pk+1]).filter(email="[email protected]"
).values_list("id", "username")
)
def test_double_starts_with(self):
qs = TestUser.objects.filter(username__startswith='Hello') | TestUser.objects.filter(username__startswith='Goodbye')
self.assertEqual(0, qs.count())
TestUser.objects.create(username="Hello")
self.assertEqual(1, qs.count())
TestUser.objects.create(username="Goodbye")
self.assertEqual(2, qs.count())
TestUser.objects.create(username="Hello and Goodbye")
self.assertEqual(3, qs.count())
def test_impossible_starts_with(self):
TestUser.objects.create(username="Hello")
TestUser.objects.create(username="Goodbye")
TestUser.objects.create(username="Hello and Goodbye")
qs = TestUser.objects.filter(username__startswith='Hello') & TestUser.objects.filter(username__startswith='Goodbye')
self.assertEqual(0, qs.count())
def test_datetime_contains(self):
"""
        Django allows __contains on datetime fields, so that you can search for a specific
        date. This is probably just because SQL allows querying them as strings, and contains
        simply turns into a LIKE query. This test just makes sure we behave the same way.
"""
instance = DateTimeModel.objects.create() # Create a DateTimeModel, it has auto_now stuff
# Make sure that if we query a datetime on a date it is properly returned
self.assertItemsEqual([instance], DateTimeModel.objects.filter(datetime_field__contains=instance.datetime_field.date()))
self.assertItemsEqual([instance], DateTimeModel.objects.filter(date_field__contains=instance.date_field.year))
def test_combinations_of_special_indexes(self):
qs = TestUser.objects.filter(username__iexact='Hello') | TestUser.objects.filter(username__contains='ood')
self.assertEqual(0, qs.count())
TestUser.objects.create(username="Hello")
self.assertEqual(1, qs.count())
TestUser.objects.create(username="Goodbye")
self.assertEqual(2, qs.count())
TestUser.objects.create(username="Hello and Goodbye")
self.assertEqual(3, qs.count())
def test_multi_table_inheritance(self):
parent = MultiTableParent.objects.create(parent_field="parent1")
child1 = MultiTableChildOne.objects.create(parent_field="child1", child_one_field="child1")
child2 = MultiTableChildTwo.objects.create(parent_field="child2", child_two_field="child2")
self.assertEqual(3, MultiTableParent.objects.count())
self.assertItemsEqual([parent.pk, child1.pk, child2.pk],
list(MultiTableParent.objects.values_list('pk', flat=True)))
self.assertEqual(1, MultiTableChildOne.objects.count())
self.assertEqual(child1, MultiTableChildOne.objects.get())
self.assertEqual(1, MultiTableChildTwo.objects.count())
self.assertEqual(child2, MultiTableChildTwo.objects.get())
self.assertEqual(child2, MultiTableChildTwo.objects.get(pk=child2.pk))
self.assertTrue(MultiTableParent.objects.filter(pk=child2.pk).exists())
def test_anding_pks(self):
results = TestUser.objects.filter(id__exact=self.u1.pk).filter(id__exact=self.u2.pk)
self.assertEqual(list(results), [])
def test_unusual_queries(self):
results = TestFruit.objects.filter(name__in=["apple", "orange"])
self.assertEqual(1, len(results))
self.assertItemsEqual(["apple"], [x.name for x in results])
results = TestFruit.objects.filter(name__in=["apple", "banana"])
self.assertEqual(2, len(results))
self.assertItemsEqual(["apple", "banana"], [x.name for x in results])
results = TestFruit.objects.filter(name__in=["apple", "banana"]).values_list('pk', 'color')
self.assertEqual(2, len(results))
self.assertItemsEqual([(self.apple.pk, self.apple.color), (self.banana.pk, self.banana.color)], results)
results = TestUser.objects.all()
self.assertEqual(5, len(results))
results = TestUser.objects.filter(username__in=["A", "B"])
self.assertEqual(2, len(results))
self.assertItemsEqual(["A", "B"], [x.username for x in results])
results = TestUser.objects.filter(username__in=["A", "B"]).exclude(username="A")
self.assertEqual(1, len(results), results)
self.assertItemsEqual(["B"], [x.username for x in results])
results = TestUser.objects.filter(username__lt="E")
self.assertEqual(4, len(results))
self.assertItemsEqual(["A", "B", "C", "D"], [x.username for x in results])
results = TestUser.objects.filter(username__lte="E")
self.assertEqual(5, len(results))
        # A double exclude on different properties is not supported
with self.assertRaises(NotSupportedError):
#FIXME: This should raise a NotSupportedError, but at the moment it's thrown too late in
#the process and so Django wraps it as a DataError
list(TestUser.objects.exclude(username="E").exclude(email="A"))
results = list(TestUser.objects.exclude(username="E").exclude(username="A"))
self.assertItemsEqual(["B", "C", "D"], [x.username for x in results ])
results = TestUser.objects.filter(username="A", email="[email protected]")
self.assertEqual(1, len(results))
results = TestUser.objects.filter(username__in=["A", "B"]).filter(username__in=["A", "B"])
self.assertEqual(2, len(results))
self.assertItemsEqual(["A", "B"], [x.username for x in results])
results = TestUser.objects.filter(username__in=["A", "B"]).filter(username__in=["A"])
self.assertEqual(1, len(results))
self.assertItemsEqual(["A"], [x.username for x in results])
results = TestUser.objects.filter(pk__in=[self.u1.pk, self.u2.pk]).filter(username__in=["A"])
self.assertEqual(1, len(results))
self.assertItemsEqual(["A"], [x.username for x in results])
results = TestUser.objects.filter(username__in=["A"]).filter(pk__in=[self.u1.pk, self.u2.pk])
self.assertEqual(1, len(results))
self.assertItemsEqual(["A"], [x.username for x in results])
results = list(TestUser.objects.all().exclude(username__in=["A"]))
self.assertItemsEqual(["B", "C", "D", "E"], [x.username for x in results ])
results = list(TestFruit.objects.filter(name='apple', color__in=[]))
self.assertItemsEqual([], results)
results = list(TestUser.objects.all().exclude(username__in=[]))
self.assertEqual(5, len(results))
self.assertItemsEqual(["A", "B", "C", "D", "E"], [x.username for x in results ])
results = list(TestUser.objects.all().exclude(username__in=[]).filter(username__in=["A", "B"]))
self.assertEqual(2, len(results))
self.assertItemsEqual(["A", "B"], [x.username for x in results])
results = list(TestUser.objects.all().filter(username__in=["A", "B"]).exclude(username__in=[]))
self.assertEqual(2, len(results))
self.assertItemsEqual(["A", "B"], [x.username for x in results])
def test_empty_string_key(self):
# Creating
with self.assertRaises(IntegrityError):
TestFruit.objects.create(name='')
# Getting
with self.assertRaises(TestFruit.DoesNotExist):
TestFruit.objects.get(name='')
# Filtering
results = list(TestFruit.objects.filter(name='').order_by("name"))
self.assertItemsEqual([], results)
# Combined filtering
results = list(TestFruit.objects.filter(name='', color='red').order_by("name"))
self.assertItemsEqual([], results)
# IN query
results = list(TestFruit.objects.filter(name__in=['', 'apple']))
self.assertItemsEqual([self.apple], results)
def test_or_queryset(self):
"""
This constructs an OR query; this is currently broken in the
parse_where_and_check_projection function. WE MUST FIX THIS!
"""
q1 = TestUser.objects.filter(username="A")
q2 = TestUser.objects.filter(username="B")
self.assertItemsEqual([self.u1, self.u2], list(q1 | q2))
def test_or_q_objects(self):
""" Test use of Q objects in filters. """
query = TestUser.objects.filter(Q(username="A") | Q(username="B"))
self.assertItemsEqual([self.u1, self.u2], list(query))
def test_extra_select(self):
results = TestUser.objects.filter(username='A').extra(select={'is_a': "username = 'A'"})
self.assertEqual(1, len(results))
self.assertItemsEqual([True], [x.is_a for x in results])
results = TestUser.objects.all().exclude(username='A').extra(select={'is_a': "username = 'A'"})
self.assertEqual(4, len(results))
self.assertFalse(any(x.is_a for x in results))
# Up for debate
# results = User.objects.all().extra(select={'truthy': 'TRUE'})
# self.assertEqual(all([x.truthy for x in results]), True)
results = TestUser.objects.all().extra(select={'truthy': True})
self.assertTrue(all(x.truthy for x in results))
def test_counts(self):
self.assertEqual(5, TestUser.objects.count())
self.assertEqual(2, TestUser.objects.filter(email="[email protected]").count())
self.assertEqual(3, TestUser.objects.exclude(email="[email protected]").count())
self.assertEqual(1, TestUser.objects.filter(username="A").exclude(email="[email protected]").count())
self.assertEqual(3, TestUser.objects.exclude(username="E").exclude(username="A").count())
self.assertEqual(3, TestUser.objects.exclude(username__in=["A", "B"]).count())
self.assertEqual(0, TestUser.objects.filter(email="[email protected]").exclude(username__in=["A", "B"]).count())
def test_exclude_with__in(self):
self.assertEqual(
set([self.u3, self.u4, self.u5]),
set(list(TestUser.objects.exclude(username__in=["A", "B"])))
)
def test_deletion(self):
count = TestUser.objects.count()
self.assertTrue(count)
TestUser.objects.filter(username="A").delete()
self.assertEqual(count - 1, TestUser.objects.count())
TestUser.objects.filter(username="B").exclude(username="B").delete() #Should do nothing
self.assertEqual(count - 1, TestUser.objects.count())
TestUser.objects.all().delete()
count = TestUser.objects.count()
self.assertFalse(count)
def test_double_delete(self):
u1 = TestUser.objects.get(username="A")
u2 = TestUser.objects.get(username="A")
u1.delete()
u2.delete()
def test_insert_with_existing_key(self):
user = TestUser.objects.create(id=999, username="test1", last_login=datetime.datetime.now().date())
self.assertEqual(999, user.pk)
with self.assertRaises(IntegrityError):
TestUser.objects.create(id=999, username="test2", last_login=datetime.datetime.now().date())
def test_included_pks(self):
ids = [ TestUser.objects.get(username="B").pk, TestUser.objects.get(username="A").pk ]
results = TestUser.objects.filter(pk__in=ids).order_by("username")
self.assertEqual(results[0], self.u1)
self.assertEqual(results[1], self.u2)
def test_select_related(self):
""" select_related should be a no-op... for now """
user = TestUser.objects.get(username="A")
Permission.objects.create(user=user, perm="test_perm")
select_related = [ (p.perm, p.user.username) for p in user.permission_set.select_related() ]
self.assertEqual(user.username, select_related[0][1])
def test_cross_selects(self):
user = TestUser.objects.get(username="A")
Permission.objects.create(user=user, perm="test_perm")
with self.assertRaises(NotSupportedError):
perms = list(Permission.objects.all().values_list("user__username", "perm"))
self.assertEqual("A", perms[0][0])
def test_invalid_id_value_on_insert(self):
user = TestUser.objects.get(username="A")
# pass in a User object as if it's an int
permission = Permission(user_id=user, perm="test_perm")
with self.assertRaises(TypeError):
permission.save(force_insert=True)
def test_values_list_on_pk_does_keys_only_query(self):
from google.appengine.api.datastore import Query
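# Temporarily swap in a wrapper around datastore.Query.__init__ below, so the
# constructor kwargs can be captured and we can assert that a pk-only
# values_list() triggers a keys_only query.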
def replacement_init(*args, **kwargs):
replacement_init.called_args = args
replacement_init.called_kwargs = kwargs
original_init(*args, **kwargs)
replacement_init.called_args = None
replacement_init.called_kwargs = None
try:
original_init = Query.__init__
Query.__init__ = replacement_init
list(TestUser.objects.all().values_list('pk', flat=True))
finally:
Query.__init__ = original_init
self.assertTrue(replacement_init.called_kwargs.get('keys_only'))
self.assertEqual(5, len(TestUser.objects.all().values_list('pk')))
def test_iexact(self):
user = TestUser.objects.get(username__iexact="a")
self.assertEqual("A", user.username)
add_special_index(IntegerModel, "integer_field", IExactIndexer(), "iexact")
IntegerModel.objects.create(integer_field=1000)
integer_model = IntegerModel.objects.get(integer_field__iexact=str(1000))
self.assertEqual(integer_model.integer_field, 1000)
user = TestUser.objects.get(id__iexact=str(self.u1.id))
self.assertEqual("A", user.username)
def test_iexact_containing_underscores(self):
add_special_index(TestUser, "username", IExactIndexer(), "iexact")
user = TestUser.objects.create(username="A_B", email="[email protected]")
results = TestUser.objects.filter(username__iexact=user.username.lower())
self.assertEqual(list(results), [user])
def test_year(self):
user = TestUser.objects.create(username="Z", email="[email protected]")
user.last_login = datetime.datetime(2000,1,1,0,0,0)
user.save()
self.assertEqual(len(TestUser.objects.filter(last_login__year=3000)), 0)
self.assertEqual(TestUser.objects.filter(last_login__year=2000).first().pk, user.pk)
def test_ordering(self):
users = TestUser.objects.all().order_by("username")
self.assertEqual(["A", "B", "C", "D", "E"], [x.username for x in users])
users = TestUser.objects.all().order_by("-username")
self.assertEqual(["A", "B", "C", "D", "E"][::-1], [x.username for x in users])
with self.assertRaises(FieldError):
users = list(TestUser.objects.order_by("bananas"))
users = TestUser.objects.filter(id__in=[self.u2.id, self.u3.id, self.u4.id]).order_by('id')
self.assertEqual(["B", "C", "D"], [x.username for x in users])
users = TestUser.objects.filter(id__in=[self.u2.id, self.u3.id, self.u4.id]).order_by('-id')
self.assertEqual(["D", "C", "B"], [x.username for x in users])
users = TestUser.objects.filter(id__in=[self.u1.id, self.u5.id, self.u3.id]).order_by('id')
self.assertEqual(["A", "C", "E"], [x.username for x in users])
users = TestUser.objects.filter(id__in=[self.u4.id, self.u5.id, self.u3.id, self.u1.id]).order_by('-id')
self.assertEqual(["E", "D", "C", "A"], [x.username for x in users])
def test_dates_query(self):
z_user = TestUser.objects.create(username="Z", email="[email protected]")
z_user.last_login = datetime.date(2013, 4, 5)
z_user.save()
last_a_login = TestUser.objects.get(username="A").last_login
dates = TestUser.objects.dates('last_login', 'year')
self.assertItemsEqual(
[datetime.date(2013, 1, 1), datetime.date(last_a_login.year, 1, 1)],
dates
)
dates = TestUser.objects.dates('last_login', 'month')
self.assertItemsEqual(
[datetime.date(2013, 4, 1), datetime.date(last_a_login.year, last_a_login.month, 1)],
dates
)
dates = TestUser.objects.dates('last_login', 'day')
self.assertEqual(
[datetime.date(2013, 4, 5), last_a_login],
list(dates)
)
dates = TestUser.objects.dates('last_login', 'day', order='DESC')
self.assertEqual(
[last_a_login, datetime.date(2013, 4, 5)],
list(dates)
)
@override_settings(DJANGAE_MAX_QUERY_BRANCHES=30)
def test_in_query(self):
""" Test that the __in filter works, and that it cannot be used with more than 30 values,
unless it's used on the PK field.
"""
# Check that a basic __in query works
results = list(TestUser.objects.filter(username__in=['A', 'B']))
self.assertItemsEqual(results, [self.u1, self.u2])
# Check that it also works on PKs
results = list(TestUser.objects.filter(pk__in=[self.u1.pk, self.u2.pk]))
self.assertItemsEqual(results, [self.u1, self.u2])
# Check that using more than 30 items in an __in query not on the pk causes death
query = TestUser.objects.filter(username__in=list([x for x in letters[:31]]))
self.assertRaises(Exception, list, query)
# Check that it's ok with PKs though
query = TestUser.objects.filter(pk__in=list(range(1, 32)))
list(query)
# Check that it's ok joining filters with pks
results = list(TestUser.objects.filter(
pk__in=[self.u1.pk, self.u2.pk, self.u3.pk]).filter(pk__in=[self.u1.pk, self.u2.pk]))
self.assertItemsEqual(results, [self.u1, self.u2])
def test_self_relations(self):
obj = SelfRelatedModel.objects.create()
obj2 = SelfRelatedModel.objects.create(related=obj)
self.assertEqual(list(obj.selfrelatedmodel_set.all()), [obj2])
def test_special_indexes_for_empty_fields(self):
obj = TestFruit.objects.create(name='pear')
indexes = ['icontains', 'contains', 'iexact', 'iendswith', 'endswith', 'istartswith', 'startswith']
for index in indexes:
add_special_index(TestFruit, 'color', get_indexer(TestFruit._meta.get_field("color"), index), index)
obj.save()
def test_special_indexes_for_unusually_long_values(self):
obj = TestFruit.objects.create(name='pear', color='1234567890-=!@#$%^&*()_+qQWERwertyuiopasdfghjklzxcvbnm')
indexes = ['icontains', 'contains', 'iexact', 'iendswith', 'endswith', 'istartswith', 'startswith']
for index in indexes:
add_special_index(TestFruit, 'color', get_indexer(TestFruit._meta.get_field("color"), index), index)
obj.save()
qry = TestFruit.objects.filter(color__contains='1234567890-=!@#$%^&*()_+qQWERwertyuiopasdfghjklzxcvbnm')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__contains='890-=!@#$')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__contains='1234567890-=!@#$%^&*()_+qQWERwertyui')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__contains='8901')
self.assertEqual(len(list(qry)), 0)
qry = TestFruit.objects.filter(color__icontains='1234567890-=!@#$%^&*()_+qQWERWERTYuiopasdfghjklzxcvbnm')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__icontains='890-=!@#$')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__icontains='1234567890-=!@#$%^&*()_+qQWERwertyuI')
self.assertEqual(len(list(qry)), 1)
qry = TestFruit.objects.filter(color__icontains='8901')
self.assertEqual(len(list(qry)), 0)
def test_values_list_on_distinct(self):
TestFruit.objects.create(name="Kiwi", origin="New Zealand", color="Green")
TestFruit.objects.create(name="Apple", origin="New Zealand", color="Green")
TestFruit.objects.create(name="Grape", origin="New Zealand", color="Red")
nz_colours = TestFruit.objects.filter(
origin="New Zealand"
).distinct("color").values_list("color", flat=True)
self.assertEqual(sorted(nz_colours), ["Green", "Red"])
def test_empty_key_lookups_work_correctly(self):
t1 = TestFruit.objects.create(name="Kiwi", origin="New Zealand", color="Green")
TestFruit.objects.create(name="Apple", origin="New Zealand", color="Green")
self.assertEqual(t1,
TestFruit.objects.exclude(name="Apple").exclude(name="").get(name="Kiwi")
)
self.assertFalse(TestFruit.objects.filter(name="", color="Green"))
self.assertTrue(TestFruit.objects.filter(Q(name="") | Q(name="Kiwi")).filter(color="Green"))
self.assertFalse(TestFruit.objects.filter(name="", color__gt="A"))
self.assertEqual(4, TestFruit.objects.exclude(name="").count())
def test_additional_indexes_respected(self):
project, additional = indexing._project_special_indexes.copy(), indexing._app_special_indexes.copy()
try:
indexing._project_special_indexes = {}
indexing._app_special_indexes = {
TestFruit._meta.db_table: { "name": ["iexact"] }
}
t1 = TestFruit.objects.create(name="Kiwi", origin="New Zealand", color="Green")
self.assertEqual(t1, TestFruit.objects.filter(name__iexact="kiwi").get())
self.assertFalse(indexing._project_special_indexes) # Nothing was added
finally:
indexing._project_special_indexes = project
indexing._app_special_indexes = additional
class BlobstoreFileUploadHandlerTest(TestCase):
boundary = "===============7417945581544019063=="
def setUp(self):
super(BlobstoreFileUploadHandlerTest, self).setUp()
self.request = RequestFactory().get('/')
self.request.META = {
'wsgi.input': self._create_wsgi_input(),
'content-type': 'message/external-body; blob-key="PLOF0qOie14jzHWJXEa9HA=="; access-type="X-AppEngine-BlobKey"'
}
self.uploader = BlobstoreFileUploadHandler(self.request)
self.extra_content_type = {'blob-key': 'PLOF0qOie14jzHWJXEa9HA==', 'access-type': 'X-AppEngine-BlobKey'}
def _create_wsgi_input(self):
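# Builds a hand-rolled multipart/form-data body: three plain-text fields plus
# one "external-body" part carrying the blobstore key (X-AppEngine-BlobKey)
# for the uploaded Scan.tiff file.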
return StringIO('--===============7417945581544019063==\r\nContent-Type:'
' text/plain\r\nContent-Disposition: form-data;'
' name="field-nationality"\r\n\r\nAS\r\n'
'--===============7417945581544019063==\r\nContent-Type:'
' message/external-body; blob-key="PLOF0qOie14jzHWJXEa9HA==";'
' access-type="X-AppEngine-BlobKey"\r\nContent-Disposition:'
' form-data; name="field-file";'
' filename="Scan.tiff"\r\n\r\nContent-Type: image/tiff'
'\r\nContent-Length: 19837164\r\nContent-MD5:'
' YjI1M2Q5NjM5YzdlMzUxYjMyMjA0ZTIxZjAyNzdiM2Q=\r\ncontent-disposition:'
' form-data; name="field-file";'
' filename="Scan.tiff"\r\nX-AppEngine-Upload-Creation: 2014-03-07'
' 14:48:03.246607\r\n\r\n\r\n'
'--===============7417945581544019063==\r\nContent-Type:'
' text/plain\r\nContent-Disposition: form-data;'
' name="field-number"\r\n\r\n6\r\n'
'--===============7417945581544019063==\r\nContent-Type:'
' text/plain\r\nContent-Disposition: form-data;'
' name="field-salutation"\r\n\r\nmrs\r\n'
'--===============7417945581544019063==--')
def test_non_existing_files_do_not_get_created(self):
file_field_name = 'field-file'
length = len(self._create_wsgi_input().read())
self.uploader.handle_raw_input(self.request.META['wsgi.input'], self.request.META, length, self.boundary, "utf-8")
self.assertRaises(StopFutureHandlers, self.uploader.new_file, file_field_name,
'file_name', None, None, None, self.extra_content_type)
self.assertRaises(EntityNotFoundError, self.uploader.file_complete, None)
def test_blob_key_creation(self):
file_field_name = 'field-file'
length = len(self._create_wsgi_input().read())
self.uploader.handle_raw_input(self.request.META['wsgi.input'], self.request.META, length, self.boundary, "utf-8")
self.assertRaises(
StopFutureHandlers,
self.uploader.new_file, file_field_name, 'file_name', None, None, None, self.extra_content_type
)
self.assertIsNotNone(self.uploader.blobkey)
def test_blobstore_upload_url_templatetag(self):
template = """{% load storage %}{% blobstore_upload_url '/something/' %}"""
response = Template(template).render(Context({}))
self.assertTrue(response.startswith("http://localhost:8080/_ah/upload/"))
class DatastorePaginatorTest(TestCase):
def setUp(self):
super(DatastorePaginatorTest, self).setUp()
for i in range(15):
PaginatorModel.objects.create(foo=i)
def test_basic_usage(self):
def qs():
return PaginatorModel.objects.all().order_by('foo')
p1 = paginator.DatastorePaginator(qs(), 5).page(1)
self.assertFalse(p1.has_previous())
self.assertTrue(p1.has_next())
self.assertEqual(p1.start_index(), 1)
self.assertEqual(p1.end_index(), 5)
self.assertEqual(p1.next_page_number(), 2)
self.assertEqual([x.foo for x in p1], [0, 1, 2, 3, 4])
p2 = paginator.DatastorePaginator(qs(), 5).page(2)
self.assertTrue(p2.has_previous())
self.assertTrue(p2.has_next())
self.assertEqual(p2.start_index(), 6)
self.assertEqual(p2.end_index(), 10)
self.assertEqual(p2.previous_page_number(), 1)
self.assertEqual(p2.next_page_number(), 3)
self.assertEqual([x.foo for x in p2], [5, 6, 7, 8, 9])
p3 = paginator.DatastorePaginator(qs(), 5).page(3)
self.assertTrue(p3.has_previous())
self.assertFalse(p3.has_next())
self.assertEqual(p3.start_index(), 11)
self.assertEqual(p3.end_index(), 15)
self.assertEqual(p3.previous_page_number(), 2)
self.assertEqual([x.foo for x in p3], [10, 11, 12, 13, 14])
def test_empty(self):
qs = PaginatorModel.objects.none()
p1 = paginator.DatastorePaginator(qs, 5).page(1)
self.assertFalse(p1.has_previous())
self.assertFalse(p1.has_next())
self.assertEqual(p1.start_index(), 0)
self.assertEqual(p1.end_index(), 0)
self.assertEqual([x for x in p1], [])
class TestSpecialIndexers(TestCase):
def setUp(self):
super(TestSpecialIndexers, self).setUp()
self.names = [
'Ola', 'Adam', 'Luke', 'rob', 'Daniel', 'Ela', 'Olga', 'olek',
'ola', 'Olaaa', 'OlaaA', 'Ola + Ola', '-Test-', '-test-'
]
for name in self.names:
SpecialIndexesModel.objects.create(name=name)
self.lists = [
self.names,
['Name', 'name', 'name + name'],
['-Tesst-'],
['-test-']
]
for i, sample_list in enumerate(self.lists):
SpecialIndexesModel.objects.create(name=i, sample_list=sample_list)
self.qry = SpecialIndexesModel.objects.all()
def test_iexact_lookup(self):
for name in self.names:
qry = self.qry.filter(name__iexact=name)
self.assertEqual(len(qry), len([x for x in self.names if x.lower() == name.lower()]))
def test_contains_lookup_and_icontains_lookup(self):
tests = self.names + ['o', 'O', 'la']
for name in tests:
qry = self.qry.filter(name__contains=name)
self.assertEqual(len(qry), len([x for x in self.names if name in x]))
qry = self.qry.filter(name__icontains=name)
self.assertEqual(len(qry), len([x for x in self.names if name.lower() in x.lower()]))
def test_contains_lookup_on_charfield_subclass(self):
""" Test that the __contains lookup also works on subclasses of the Django CharField, e.g.
the custom Djangae CharField.
"""
instance = SpecialIndexesModel.objects.create(name="whatever", nickname="Voldemort")
query = SpecialIndexesModel.objects.filter(nickname__contains="demo")
self.assertEqual(list(query), [instance])
def test_endswith_lookup_and_iendswith_lookup(self):
tests = self.names + ['a', 'A', 'aa']
for name in tests:
qry = self.qry.filter(name__endswith=name)
self.assertEqual(len(qry), len([x for x in self.names if x.endswith(name)]))
qry = self.qry.filter(name__iendswith=name)
self.assertEqual(len(qry), len([x for x in self.names if x.lower().endswith(name.lower())]))
def test_startswith_lookup_and_istartswith_lookup(self):
tests = self.names + ['O', 'o', 'ola']
for name in tests:
qry = self.qry.filter(name__startswith=name)
self.assertEqual(len(qry), len([x for x in self.names if x.startswith(name)]))
qry = self.qry.filter(name__istartswith=name)
self.assertEqual(len(qry), len([x for x in self.names if x.lower().startswith(name.lower())]))
def test_regex_lookup_and_iregex_lookup(self):
tests = [r'([A-Z])\w+', r'([A-Z])\w+\s[+]\s([A-Z])\w+', r'\-Test\-']
for pattern in tests:
qry = self.qry.filter(name__regex=pattern)
self.assertEqual(len(qry), len([x for x in self.names if re.search(pattern, x)]))
qry = self.qry.filter(name__iregex=pattern)
self.assertEqual(len(qry), len([x for x in self.names if re.search(pattern, x, flags=re.I)]))
# Check that the same works for ListField and SetField too
qry = self.qry.filter(sample_list__item__regex=pattern)
expected = [sample_list for sample_list in self.lists if any([bool(re.search(pattern, x)) for x in sample_list])]
self.assertEqual(len(qry), len(expected))
qry = self.qry.filter(sample_list__item__iregex=pattern)
expected = [sample_list for sample_list in self.lists if any([bool(re.search(pattern, x, flags=re.I)) for x in sample_list])]
self.assertEqual(len(qry), len(expected))
def test_item_contains_item_icontains_lookup(self):
tests = ['O', 'la', 'ola']
for text in tests:
qry = self.qry.filter(sample_list__item__contains=text)
self.assertEqual(len(qry), 1)
qry = self.qry.filter(sample_list__item__icontains=text)
self.assertEqual(len(qry), 1)
def test_item_startswith_item_istartswith_lookup(self):
tests = ['O', 'ola', 'Ola']
for text in tests:
qry = self.qry.filter(sample_list__item__startswith=text)
self.assertEqual(len(qry), 1)
qry = self.qry.filter(sample_list__item__istartswith=text)
self.assertEqual(len(qry), 1)
def test_item_endswith_item_iendswith_lookup(self):
tests = ['a', 'la', 'Ola']
for text in tests:
qry = self.qry.filter(sample_list__item__endswith=text)
self.assertEqual(len(qry), 1)
qry = self.qry.filter(sample_list__item__iendswith=text)
self.assertEqual(len(qry), 1)
class SliceModel(models.Model):
field1 = models.CharField(max_length=32)
class SlicingTests(TestCase):
def test_big_slice(self):
SliceModel.objects.create(field1="test")
SliceModel.objects.create(field1="test2")
self.assertFalse(
SliceModel.objects.filter(field1__in=["test", "test2"])[9999:]
)
self.assertFalse(
SliceModel.objects.filter(field1__in=["test", "test2"])[9999:10000]
)
def test_slicing_multi_query(self):
SliceModel.objects.create(field1="test")
SliceModel.objects.create(field1="test2")
self.assertEqual(
1,
len(SliceModel.objects.filter(field1__in=["test", "test2"])[1:])
)
self.assertEqual(
1,
len(SliceModel.objects.filter(field1__in=["test", "test2"])[:1])
)
self.assertEqual(
2,
len(SliceModel.objects.filter(field1__in=["test", "test2"])[:2])
)
self.assertEqual(
0,
len(SliceModel.objects.filter(field1__in=["test", "test2"])[2:])
)
def test_slice_params_are_passed_to_query(self):
for i in range(15):
SliceModel.objects.create(field1=str(i))
with sleuth.watch('google.appengine.api.datastore.Query.Run') as Run:
qs = SliceModel.objects.order_by("field1")[:5]
self.assertEqual(5, len(list(qs)))
self.assertEqual(Run.calls[0].kwargs['limit'], 5)
self.assertEqual(Run.calls[0].kwargs['offset'], 0)
qs = SliceModel.objects.order_by("field1")[5:]
self.assertEqual(10, len(list(qs)))
self.assertEqual(Run.calls[1].kwargs['limit'], None)
self.assertEqual(Run.calls[1].kwargs['offset'], 5)
qs = SliceModel.objects.order_by("field1")[5:10]
self.assertEqual(5, len(list(qs)))
self.assertEqual(Run.calls[2].kwargs['limit'], 5)
self.assertEqual(Run.calls[2].kwargs['offset'], 5)
class NamespaceTests(TestCase):
multi_db = True
@skipIf("ns1" not in settings.DATABASES, "This test is designed for the Djangae testapp settings")
def test_database_specific_namespaces(self):
TestFruit.objects.create(name="Apple", color="Red")
TestFruit.objects.create(name="Orange", color="Orange")
TestFruit.objects.using("ns1").create(name="Apple", color="Red")
self.assertEqual(1, TestFruit.objects.using("ns1").count())
self.assertEqual(2, TestFruit.objects.count())
with self.assertRaises(TestFruit.DoesNotExist):
TestFruit.objects.using("ns1").get(name="Orange")
try:
TestFruit.objects.get(name="Orange")
except TestFruit.DoesNotExist:
self.fail("Unable to retrieve fruit from the default namespace")
self.assertEqual(1, TestFruit.objects.filter(name="Orange", color="Orange").count())
self.assertEqual(0, TestFruit.objects.using("ns1").filter(name="Orange", color="Orange").count())
def test_no_database_namespace_defaults_to_empty(self):
"""
Test that creating an object without a namespace makes one that is
retrievable with just a kind and ID
"""
TestFruit.objects.using("nonamespace").create(name="Apple", color="Red")
key = datastore.Key.from_path(TestFruit._meta.db_table, "Apple")
self.assertTrue(datastore.Get([key])[0])
@skipIf("nonamespace" not in settings.DATABASES, "This test is designed for the Djangae testapp settings")
def test_move_objects_between_namespaces(self):
objs = [
TestFruit.objects.create(name="Banana", color="Black"),
TestFruit.objects.create(name="Tomato", color="Red"),
]
# First, check that these objects do not exist in the other namespace.
# We check this in several ways to ensure that the namespace functionality works in the
# various commands of the DB backend
other_qs = TestFruit.objects.using("nonamespace")
self.assertEqual(len(other_qs.all()), 0)
self.assertEqual(other_qs.count(), 0)
for obj in objs:
self.assertRaises(TestFruit.DoesNotExist, other_qs.get, name=obj.name)
# Now re-save both of the objects into the other namespace
for obj in objs:
obj.save(using="nonamespace")
# And now check that they DO exist in that namespace
self.assertEqual(len(other_qs.all()), 2)
self.assertEqual(other_qs.count(), 2)
for obj in objs:
self.assertEqual(other_qs.get(name=obj.name), obj)
# Now delete the objects from the original (default) namespace
TestFruit.objects.all().delete()
# And now make sure that they exist ONLY in the other namespace
self.assertEqual(len(TestFruit.objects.all()), 0)
self.assertEqual(len(other_qs.all()), 2)
self.assertEqual(TestFruit.objects.count(), 0)
self.assertEqual(other_qs.count(), 2)
for obj in objs:
self.assertRaises(TestFruit.DoesNotExist, TestFruit.objects.get, name=obj.name)
self.assertEqual(other_qs.get(name=obj.name), obj)
def deferred_func():
pass
class TestHelperTests(TestCase):
def test_inconsistent_db(self):
with inconsistent_db():
fruit = TestFruit.objects.create(name="banana")
self.assertEqual(0, TestFruit.objects.count()) # Inconsistent query
self.assertEqual(1, TestFruit.objects.filter(pk=fruit.pk).count()) #Consistent query
def test_processing_tasks(self):
from google.appengine.api import apiproxy_stub_map
stub = apiproxy_stub_map.apiproxy.GetStub("taskqueue")
stub._queues[None]._ConstructQueue("another") # Add a test queue
stub._queues[None]._queue_yaml_parser = None # Make it so that the taskqueue stub doesn't reload from YAML
self.assertNumTasksEquals(0) #No tasks
deferred.defer(deferred_func)
self.assertNumTasksEquals(1, queue_name='default')
deferred.defer(deferred_func, _queue='another')
self.assertNumTasksEquals(1, queue_name='another')
taskqueue.add(url='/')
self.assertNumTasksEquals(2, queue_name='default')
self.process_task_queues()
self.assertNumTasksEquals(0) #No tasks
class Zoo(models.Model):
pass
class Enclosure(models.Model):
zoo = models.ForeignKey(Zoo)
class Animal(models.Model):
enclosure = models.ForeignKey(Enclosure)
class CascadeDeletionTests(TestCase):
def test_deleting_more_than_30_items(self):
zoo = Zoo.objects.create()
for i in range(40):
enclosure = Enclosure.objects.create(zoo=zoo)
for i in range(2):
Animal.objects.create(enclosure=enclosure)
self.assertEqual(Animal.objects.count(), 80)
zoo.delete()
self.assertEqual(Enclosure.objects.count(), 0)
self.assertEqual(Animal.objects.count(), 0)
| grzes/djangae | djangae/tests/test_connector.py | Python | bsd-3-clause | 94,756 |
import os
import sys
import warnings
try:
import requests
_has_network = True
except ImportError:
_has_network = False
warnings.warn("Cannot use networked config support. Install requests to enable it.", ImportWarning)
# Hack for Python3.2 and below
if sys.version_info[1] <= 2:
FileNotFoundError = IOError
from configmaster import ConfigKey
from configmaster import exc
def networked_dump_hook(*args, **kwargs):
raise exc.NetworkedFileException("Cannot write to a networked file.")
class ConfigObject(object):
"""
The abstract base class for a Config object.
All types of config file extend from this.
This provides several methods that don't need to be re-implemented in subclasses.
Notes:
- The raw data to load is available via the self.data attribute.
- Need to call the load/dump hooks? Get them via load_hook or dump_hook.
"""
def __init__(self, safe_load: bool=True, load_hook=None, dump_hook=None, **kwargs):
self.safe_load = safe_load
self.load_hook = load_hook
self.dump_hook = dump_hook
self.config = ConfigKey.ConfigKey(safe_load)
self.data = None
def dumps(self) -> str:
"""
Abstract dump to string method.
"""
raise NotImplementedError
def dumpd(self) -> dict:
"""
Dump config data to a dictionary.
"""
return self.config.dump()
def load(self, **kwargs):
"""
This loads the config file using the hook provided. The ConfigObject object is passed in as argument one.
"""
return self.load_hook(self, **kwargs)
def dump(self):
"""
This dumps the config file using the hook provided. The ConfigObject is passed in as argument one.
"""
return self.dump_hook(self)
def initial_populate(self, data):
"""
Populate a newly created config object with data.
Returns True if the data was loaded, or False if the config was already populated.
It is recommended to run a .dump() and .reload() after running this.
"""
if self.config.parsed:
return False
# Otherwise, create a new ConfigKey.
self.config.load_from_dict(data)
return True
def apply_defaults(self, other_config):
"""
Applies default values from a different ConfigObject or ConfigKey object to this ConfigObject.
If there are any values in this object that are also in the default object, it will use the values from this object.
"""
if isinstance(other_config, self.__class__):
self.config.load_from_dict(other_config.config, overwrite=False)
else:
self.config.load_from_dict(other_config, overwrite=False)
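# Illustrative sketch (not part of configmaster): the load/dump hooks referred
# to above are plain callables that receive the ConfigObject as their first
# argument. A minimal JSON-style load hook might look like this; the name
# json_load_hook is an assumption made for this example only.
def json_load_hook(cfg, **kwargs):
    """Parse the raw text in cfg.data and populate cfg.config."""
    import json
    cfg.config.load_from_dict(json.loads(cfg.data or "{}"))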
class ConfigFile(ConfigObject):
"""
The abstract base class for a ConfigFile object. All config files extend from this.
It automatically opens the file (creating it if it doesn't exist), and provides a basic reload() method to reload the file from disk.
"""
def __init__(self, fd: str, load_hook=None, dump_hook=None, safe_load: bool=True, json_fix: bool=False, **kwargs):
super().__init__(safe_load, load_hook=load_hook, dump_hook=dump_hook)
# Check if fd is a string
if isinstance(fd, str):
self.path = fd.replace('/', '.').replace('\\', '.')
# Open the file.
try:
fd = open(fd)
except FileNotFoundError:
# Make sure the directory exists.
if not os.path.exists('/'.join(fd.split('/')[:-1])) and '/' in fd:
os.makedirs('/'.join(fd.split('/')[:-1]))
if not json_fix:
# Open it in write mode, and close it.
open(fd, 'w').close()
else:
# Open it in write mode, write "{}" to it, and close it.
with open(fd, 'w') as f: f.write("{}")
fd = open(fd, 'r')
else:
self.path = fd.name.replace('/', '.').replace('\\', '.')
self.fd = fd
def _dump(*args, **kwargs):
raise exc.FiletypeNotSupportedException("YAML Dumper not loaded - hook not called?")
self.dumper = _dump
self.data = self.fd.read()
self.fd.seek(0)
self.load(**kwargs)
def dump(self):
# RE-open the file in 'w' mode.
if not self.fd.writable():
name = self.fd.name
self.fd.close()
self.fd = open(name, 'w')
# Call the dump hook.
self.dump_hook(self)
# RE-open the file in 'r' mode.
name = self.fd.name
self.fd.close()
self.fd = open(name, 'r')
def dumps(self):
"""
Dump config data to string.
This uses a StringIO virtual file to ensure compatibility with dump hooks that use file-based dumping.
"""
return self.dump_hook(self, True)
def reload(self):
"""
Automatically reloads the config file.
This re-opens the underlying file and then calls self.load()."""
if not self.fd.closed: self.fd.close()
self.fd = open(self.fd.name, 'r')
self.load()
class NetworkedConfigObject(ConfigObject):
"""
An abstract Networked Config object.
This is commonly used for downloading "default" config files, and applying them to real config files.
"""
def __init__(self, url: str, normal_class_load_hook, normal_class_dump_hook, load_hook, safe_load: bool=True,
**kwargs):
if _has_network is False:
raise exc.NetworkedFileException("Requests is not installed.")
self.url = url
# Try and get url.
try:
self.request = requests.get(self.url)
except requests.exceptions.ConnectionError as e:
raise exc.NetworkedFileException("Failed to download file: {}".format(e))
if self.request.status_code != 200:
raise exc.NetworkedFileException("Failed to download file: Status code responded was {}".format(self.request.status_code))
super().__init__(safe_load=safe_load, load_hook=load_hook)
self.normal_class_hook = (normal_class_load_hook, normal_class_dump_hook)
self.data = self.request.text
self.load(**kwargs)
def dump(self):
raise exc.NetworkedFileException("Cannot write to a networked file.")
def initial_populate(self, data):
raise exc.NetworkedFileException("Cannot write to a networked file.")
def save_to_file(self, filename: str) -> ConfigFile:
"""
This converts the NetworkedConfigObject into a normal ConfigFile object.
This requires the normal class hooks to be provided.
"""
newclass = ConfigFile(fd=filename, load_hook=self.normal_class_hook[0],
dump_hook=self.normal_class_hook[1], safe_load=self.safe_load)
return newclass
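# Illustrative sketch (not part of configmaster): a typical flow for the class
# above is to download remote "default" settings and merge them into a local
# config file. The hook names and the file name are assumptions; real hooks
# live in the format-specific configmaster modules.
def _example_networked_defaults(url, load_hook, dump_hook):
    defaults = NetworkedConfigObject(url, load_hook, dump_hook,
                                     load_hook=load_hook)
    local = ConfigFile('config.json', load_hook=load_hook,
                       dump_hook=dump_hook, json_fix=True)
    local.apply_defaults(defaults.dumpd())  # only fills in missing keys
    local.dump()
    return local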
| SunDwarf/ConfigMaster | configmaster/ConfigFile.py | Python | mit | 7,050 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Coordinate importing script.
Usage:
python coordinate_import.py -lang:en -family:wikipedia -cat:Category:Coordinates_not_on_Wikidata
This will work on all pages in the category "coordinates not on Wikidata" and
will import the coordinates on these pages to Wikidata.
The data from the "GeoData" extension (https://www.mediawiki.org/wiki/Extension:GeoData)
is used so that extension has to be setup properly. You can look at the
[[Special:Nearby]] page on your local Wiki to see if it's populated.
You can use any typical pagegenerator to provide with a list of pages:
python coordinate_import.py -lang:it -family:wikipedia -transcludes:Infobox_stazione_ferroviaria -namespace:0
&params;
"""
#
# (C) Multichill, 2014
# (C) Pywikibot team, 2013-2014
#
# Distributed under the terms of MIT License.
#
from __future__ import unicode_literals
__version__ = '$Id: 22541c14ae54c4afd5a1fa4fa2f3b4f499476d5d $'
#
import pywikibot
from pywikibot import pagegenerators, WikidataBot
from pywikibot.exceptions import CoordinateGlobeUnknownException
class CoordImportRobot(WikidataBot):
"""A bot to import coordinates to Wikidata."""
def __init__(self, generator):
"""
Constructor.
Arguments:
* generator - A generator that yields Page objects.
"""
super(CoordImportRobot, self).__init__()
self.generator = pagegenerators.PreloadingGenerator(generator)
self.cacheSources()
self.prop = 'P625'
def has_coord_qualifier(self, claims):
"""
Check if self.prop is used as property for a qualifier.
@param claims: the Wikibase claims to check in
@type claims: dict
@return: the first property for which self.prop
is used as a qualifier, or None if there is none
@returntype: unicode or None
"""
for prop in claims:
for claim in claims[prop]:
if self.prop in claim.qualifiers:
return prop
def treat(self, page, item):
"""Treat page/item."""
self.current_page = page
coordinate = page.coordinates(primary_only=True)
if not coordinate:
return
claims = item.get().get('claims')
if self.prop in claims:
pywikibot.output(u'Item %s already contains coordinates (%s)'
% (item.title(), self.prop))
return
prop = self.has_coord_qualifier(claims)
if prop:
pywikibot.output(u'Item %s already contains coordinates'
u' (%s) as qualifier for %s'
% (item.title(), self.prop, prop))
return
newclaim = pywikibot.Claim(self.repo, self.prop)
newclaim.setTarget(coordinate)
pywikibot.output(u'Adding %s, %s to %s' % (coordinate.lat,
coordinate.lon,
item.title()))
try:
item.addClaim(newclaim)
source = self.getSource(page.site)
if source:
newclaim.addSource(source, bot=True)
except CoordinateGlobeUnknownException as e:
pywikibot.output(u'Skipping unsupported globe: %s' % e.args)
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
# Process global args and prepare generator args parser
local_args = pywikibot.handle_args(args)
generator_factory = pagegenerators.GeneratorFactory()
for arg in local_args:
if generator_factory.handleArg(arg):
continue
generator = generator_factory.getCombinedGenerator()
if generator:
coordbot = CoordImportRobot(generator)
coordbot.run()
else:
pywikibot.showHelp()
if __name__ == "__main__":
main()
| hperala/kontuwikibot | scripts/coordinate_import.py | Python | mit | 4,024 |
# -*- coding: utf-8 -*-
# Copyright 2008-2013 Jaap Karssenberg <[email protected]>
'''Base class and API for plugins
Zim plugins are simply python modules that contain a sub-class of
L{PluginClass}. They get a reference the the main application object
running the interface and from there can link to various objects and
widgets. The base class has convenience methods for common actions
for plugins.
Also see the HACKING notebook in the source distribution for some
notes on writing new plugins.
@note: sub-modules should contain one and exactly one subclass of
L{PluginClass}. This is because this class is detected automatically
when loading the plugin. It also means you cannot import classes of
other plugins directly into the module.
'''
from __future__ import with_statement
import gobject
import types
import os
import sys
import logging
import inspect
import collections
import zim.fs
from zim.fs import Dir
from zim.signals import SignalEmitter, ConnectorMixin, SIGNAL_AFTER, SignalHandler
from zim.actions import action, toggle_action, get_gtk_actiongroup
from zim.utils import classproperty, get_module, lookup_subclass, WeakSet
from zim.config import VirtualConfigManager
logger = logging.getLogger('zim.plugins')
def user_site_packages_directory():
'''Get the per user site-packages directory
In Python 2.6 the "Per-user site-packages Directory" feature has
been introduced, see
U{http://docs.python.org/whatsnew/2.6.html#pep-370-per-user-site-packages-directory}.
This function backports this feature to Python 2.5.
@returns: the per user site-packages directory.
This directoy is part of the search path for plugin modules, so users
can install plugins in locally.
'''
from zim.environ import environ
if os.name == 'nt':
appdata = environ.get('APPDATA')
if appdata:
dir = Dir((appdata, 'Python/Python25/site-packages'))
return dir.path
else:
return None
else:
dir = Dir('~/.local/lib/python2.5/site-packages')
return dir.path
# Add the per-user site-packages directory to the system path
if sys.version_info[0:2] == (2, 5):
userdir = user_site_packages_directory()
if userdir and not userdir in sys.path:
sys.path.insert(0, userdir)
def set_plugin_search_path():
'''Initialize C{__path__} variable with the search path for plugins
Sets C{__path__} for the C{zim.plugins} module. This determines what
directories are searched when importing plugin packages in the
zim.plugins namespace. This function looks at C{sys.path} and would
need to be run again if C{sys.path} is modified after loading this
module.
'''
global __path__
__path__ = [] # flush completely
# We don't even keep the directory of this source file because we
# want order in __path__ match order in sys.path, so per-user
# folder takes proper precedence
for dir in sys.path:
try:
dir = dir.decode(zim.fs.ENCODING)
except UnicodeDecodeError:
logger.exception('Could not decode path "%s"', dir)
continue
if os.path.basename(dir) == 'zim.exe':
# path is an executable, not a folder -- examine containing folder
dir = os.path.dirname(dir)
if dir == '':
dir = '.'
dir = os.path.sep.join((dir, 'zim', 'plugins'))
#~ print '>> PLUGIN DIR', dir
__path__.append(dir)
# extend path for importing and searching plugins
set_plugin_search_path()
def get_plugin_class(name):
'''Get the plugin class for a given name
@param name: the plugin module name (e.g. "calendar")
@returns: the plugin class object
'''
mod = get_module('zim.plugins.' + name)
return lookup_subclass(mod, PluginClass)
def list_plugins():
'''List available plugin module names
@returns: a set of available plugin names that can be loaded
using L{get_plugin_class()}.
'''
# Only listing folders in __path__ because this parameter determines
# what folders will considered when importing sub-modules of the
# this package once this module is loaded.
plugins = set()
for dir in __path__:
dir = Dir(dir)
for candidate in dir.list(): # returns [] if dir does not exist
if candidate.startswith('_') or candidate == 'base':
continue
elif candidate.endswith('.py'):
#~ print '>> FOUND %s.py in %s' % (candidate, dir.path)
plugins.add(candidate[:-3])
elif zim.fs.isdir(dir.path+'/'+candidate) \
and os.path.exists(dir.path+'/'+candidate+'/__init__.py'):
#~ print '>> FOUND %s/__init__.py in %s' % (candidate, dir.path)
plugins.add(candidate)
else:
pass
return sorted(plugins)
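# Illustrative sketch (not part of zim): the two helpers above are typically
# combined to enumerate the plugin classes that can be loaded.
def _example_iter_plugin_classes():
    for name in list_plugins():
        yield name, get_plugin_class(name)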
class PluginManager(ConnectorMixin, collections.Mapping):
'''Manager that maintains a set of active plugins
Handles loading and destroying plugins and is the entry point
for extending application components.
This object behaves as a dictionary with plugin names as keys and
plugin objects as values
'''
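# Illustrative usage sketch (comment only, not part of zim; the plugin name
# 'calendar' is an assumption for the example):
#
#   manager = PluginManager()
#   plugin = manager.load_plugin('calendar')  # activates and returns the plugin
#   'calendar' in manager                     # True - mapping interface
#   manager['calendar'] is plugin             # True
#   manager.extend(window)                    # let all plugins extend an object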
def __init__(self, config=None):
self.config = config or VirtualConfigManager()
self._preferences = \
self.config.get_config_dict('<profile>/preferences.conf')
self.general_preferences = self._preferences['General']
self.general_preferences.setdefault('plugins', [])
self._plugins = {}
self._extendables = WeakSet()
self._load_plugins()
self.connectto(self._preferences, 'changed',
self.on_preferences_changed)
def __getitem__(self, name):
return self._plugins[name]
def __iter__(self):
return iter(sorted(self._plugins.keys()))
# sort to make operation predictable - easier debugging
def __len__(self):
return len(self._plugins)
def _load_plugins(self):
'''Load plugins based on config'''
for name in sorted(self.general_preferences['plugins']):
try:
self.load_plugin(name)
except:
logger.exception('Exception while loading plugin: %s', name)
self.general_preferences['plugins'].remove(name)
@SignalHandler
def on_preferences_changed(self, o):
current = set(self._plugins.keys())
new = set(self.general_preferences['plugins'])
for name in current - new:
try:
self.remove_plugin(name)
except:
logger.exception('Exception while loading plugin: %s', name)
for name in new - current:
try:
self.load_plugin(name)
except:
logger.exception('Exception while loading plugin: %s', name)
self.general_preferences['plugins'].remove(name)
def load_plugin(self, name):
'''Load a single plugin by name
If the plugin was already loaded, the existing object
will be returned. Thus for each plugin only one instance can be
active.
@param name: the plugin module name
@returns: the plugin object
@raises Exception: when loading the plugin failed
'''
assert isinstance(name, basestring)
if name in self._plugins:
return self._plugins[name]
logger.debug('Loading plugin: %s', name)
klass = get_plugin_class(name)
if not klass.check_dependencies_ok():
raise AssertionError, 'Dependencies failed for plugin %s' % name
plugin = klass(self.config)
self.connectto(plugin, 'extension-point-changed')
self._plugins[name] = plugin
for obj in self._extendables:
try:
plugin.extend(obj)
except:
logger.exception('Exception in plugin: %s', name)
if not name in self.general_preferences['plugins']:
with self.on_preferences_changed.blocked():
self.general_preferences['plugins'].append(name)
self.general_preferences.changed()
return plugin
def remove_plugin(self, name):
'''Remove a plugin and its extensions
Fails silently if the plugin is not loaded.
@param name: the plugin module name
'''
if name in self.general_preferences['plugins']:
# Do this first regardless of exceptions etc.
with self.on_preferences_changed.blocked():
self.general_preferences['plugins'].remove(name)
self.general_preferences.changed()
try:
plugin = self._plugins.pop(name)
self.disconnect_from(plugin)
except KeyError:
pass
else:
logger.debug('Unloading plugin %s', name)
plugin.destroy()
def _foreach(self, func):
# sort to make operation predictable - easier debugging
for name, plugin in sorted(self._plugins.items()):
try:
func(plugin)
except:
logger.exception('Exception in plugin: %s', name)
def extend(self, obj):
'''Let any plugin extend the object instance C{obj}
Will also remember the object (by a weak reference) such that
plugins loaded after this call will also be called to extend
C{obj} on their construction
@param obj: arbitrary object that can be extended by plugins
'''
if not obj in self._extendables:
self._foreach(lambda p: p.extend(obj))
self._extendables.add(obj)
def on_extension_point_changed(self, plugin, name):
for obj in self._extendables:
if obj.__class__.__name__ == name:
try:
plugin.extend(obj)
except:
logger.exception('Exception in plugin: %s', name)
class PluginClass(ConnectorMixin, SignalEmitter):
'''Base class for plugins. Every module containing a plugin should
have exactly one class derived from this base class. That class
will be initialized when the plugin is loaded.
Plugin classes should define two class attributes: L{plugin_info} and
L{plugin_preferences}.
This class inherits from L{ConnectorMixin} and calls
L{ConnectorMixin.disconnect_all()} when the plugin is destroyed.
Therefore it is highly recommended to use the L{ConnectorMixin}
methods in sub-classes.
@cvar plugin_info: A dict with basic information about the plugin,
it should contain at least the following keys:
- C{name}: short name
- C{description}: one paragraph description
- C{author}: name of the author
- C{help}: page name in the manual (optional)
This info will be used e.g. in the plugin tab of the preferences
dialog.
@cvar plugin_preferences: A tuple or list defining the global
preferences for this plugin. Each preference is defined by a 4-tuple
containing the following items:
1. the key in the config file
2. an option type (see InputForm.add_inputs for more details)
3. a label to show in the dialog
4. a default value
These preferences will be initialized to their default value if not
configured by the user and the values can be found in the
L{preferences} dict. The type and label will be used to render a
default configure dialog when triggered from the preferences dialog.
Changes to these preferences will be stored in a config file so
they are persistent.
@ivar ui: the main application object, e.g. an instance of
L{zim.gui.GtkInterface} or L{zim.www.WWWInterface}
@ivar preferences: a C{ConfigDict()} with plugin preferences
Preferences are the global configuration of the plugin, they are
stored in the X{preferences.conf} config file.
@ivar uistate: a C{ConfigDict()} with plugin ui state
The "uistate" is the per notebook state of the interface, it is
intended for stuff like the last folder opened by the user or the
size of a dialog after resizing. It is stored in the X{state.conf}
file in the notebook cache folder.
@signal: C{extension-point-changed (name)}: emitted when extension
point C{name} changes
'''
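# Illustrative sketch (comment only, not part of zim): a plugin module would
# typically subclass PluginClass along these lines; every name and value below
# is invented for the example.
#
#   class ExamplePlugin(PluginClass):
#       plugin_info = {
#           'name': _('Example'),  # short name
#           'description': _('One paragraph describing the plugin'),
#           'author': 'Jane Doe',
#           'help': 'Plugins:Example',  # manual page, optional
#       }
#       plugin_preferences = (
#           # key, type, label, default
#           ('greeting', 'string', _('Greeting text'), 'Hello World'),
#           ('show_toolbar', 'bool', _('Show in the toolbar'), True),
#       )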
# define signals we want to use - (closure type, return type and arg types)
__signals__ = {
'extension-point-changed': (None, None, (basestring,))
}
plugin_info = {}
plugin_preferences = ()
@classproperty
def config_key(klass):
return klass.__name__
@classmethod
def check_dependencies_ok(klass):
'''Checks minimum dependencies are met
@returns: C{True} if this plugin can be loaded
'''
check, dependencies = klass.check_dependencies()
return check
@classmethod
def check_dependencies(klass):
'''Checks what dependencies are met and gives details
@returns: a boolean telling overall dependencies are met,
followed by a list with details.
This list consists of 3-tuples consisting of a (short)
description of the dependency, a boolean for dependency being
met, and a boolean for this dependency being optional or not.
@implementation: must be implemented in sub-classes that have
one or more (external) dependencies.
'''
return (True, [])
def __init__(self, config=None):
assert 'name' in self.plugin_info
assert 'description' in self.plugin_info
assert 'author' in self.plugin_info
self.extensions = WeakSet()
if self.plugin_preferences:
assert isinstance(self.plugin_preferences[0], tuple), 'BUG: preferences should be defined as tuples'
self.config = config or VirtualConfigManager()
self.preferences = self.config.get_config_dict('<profile>/preferences.conf')[self.config_key]
for pref in self.plugin_preferences:
if len(pref) == 4:
key, type, label, default = pref
self.preferences.setdefault(key, default)
#~ print ">>>>", key, default, '--', self.preferences[key]
else:
key, type, label, default, check = pref
self.preferences.setdefault(key, default, check=check)
#~ print ">>>>", key, default, check, '--', self.preferences[key]
self.load_extensions_classes()
@classmethod
def lookup_subclass(pluginklass, klass):
'''Returns first subclass of C{klass} found in the module of
this plugin. (Similar to L{zim.utils.lookup_subclass})
@param pluginklass: plugin class
@param klass: base class of the wanted class
'''
module = get_module(pluginklass.__module__)
return lookup_subclass(module, klass)
def load_extensions_classes(self):
self.extension_classes = {}
for name, klass in self.discover_extensions_classes():
self.add_extension_class(name, klass)
@classmethod
def discover_extensions_classes(pluginklass):
# Find related extension classes in same module
# any class with the "__extends__" field will be added
# (Being subclass of ObjectExtension is optional)
module = get_module(pluginklass.__module__)
for n, klass in inspect.getmembers(module, inspect.isclass):
if hasattr(klass, '__extends__') and klass.__extends__:
yield klass.__extends__, klass
def set_extension_class(self, name, klass):
if name in self.extension_classes:
if self.extension_classes[name] == klass:
pass
else:
self.remove_extension_class(name)
self.add_extension_class(name, klass)
else:
self.add_extension_class(name, klass)
def add_extension_class(self, name, klass):
if name in self.extension_classes:
raise AssertionError, 'Extension point %s already in use' % name
self.extension_classes[name] = klass
self.emit('extension-point-changed', name)
def remove_extension_class(self, name):
klass = self.extension_classes.pop(name)
for obj in self.get_extensions(klass):
obj.destroy()
def extend(self, obj, name=None):
# TODO also check parent classes
# name should only be used for testing
name = name or obj.__class__.__name__
if name in self.extension_classes:
ext = self.extension_classes[name](self, obj)
self.extensions.add(ext)
def get_extension(self, klass, **attr):
ext = self.get_extensions(klass)
for key, value in attr.items():
ext = filter(lambda e: getattr(e, key) == value, ext)
if len(ext) > 1:
raise AssertionError, 'BUG: multiple extensions of class %s found' % klass
elif ext:
return ext[0]
else:
return None
def get_extensions(self, klass):
return [e for e in self.extensions if isinstance(e, klass)]
def destroy(self):
'''Destroy the plugin object and all extensions
It is only called when a user actually disables the plugin,
not when the application exits.
Destroys all active extensions and disconnects all signals.
This should revert any changes the plugin made to the
application (although preferences etc. can be left in place).
'''
for obj in self.extensions:
obj.destroy()
try:
self.disconnect_all()
except:
logger.exception('Exception while disconnecting %s', self)
def extends(klass, autoload=True):
'''Decorator for extension classes
Use this decorator to add extensions to the plugin.
Takes either a class or a class name for the class to be
extended. When the plugin gets an object of this class a new
extension object will be constructed.
'''
if isinstance(klass, basestring):
name = klass
else:
name = klass.__name__
def inner(myklass):
if autoload:
myklass.__extends__ = name
# else: do nothing for now
return myklass
return inner
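# Illustrative sketch (comment only, not part of zim): the decorator above is
# applied to extension classes defined in a plugin module; the class name and
# XML below are invented for the example.
#
#   @extends('MainWindow')
#   class ExampleMainWindowExtension(WindowExtension):
#       uimanager_xml = '''
#       <ui>
#           <menubar name="menubar">
#               <menu action="tools_menu">
#                   <menuitem action="example_action"/>
#               </menu>
#           </menubar>
#       </ui>
#       '''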
class ObjectExtension(SignalEmitter, ConnectorMixin):
def __init__(self, plugin, obj):
self.plugin = plugin
self.obj = obj
# Make sure extension has same lifetime as object being extended
if not hasattr(obj, '__zim_extension_objects__'):
obj.__zim_extension_objects__ = []
obj.__zim_extension_objects__.append(self)
def destroy(self):
'''Called when the plugin is being destroyed
Calls L{teardown()} followed by the C{teardown()} methods of
base classes.
'''
def walk(klass):
yield klass
for base in klass.__bases__:
if issubclass(base, ObjectExtension):
for k in walk(base): # recurs
yield k
for klass in walk(self.__class__):
try:
klass.teardown(self)
except:
logger.exception('Exception while disconnecting %s (%s)', self, klass)
# in case you are wondering: issubclass(Foo, Foo) evaluates True
try:
self.obj.__zim_extension_objects__.remove(self)
except AttributeError:
pass
except ValueError:
pass
self.plugin.extensions.discard(self)
# HACK avoid waiting for garbage collection to take place
def teardown(self):
'''Remove changes made by B{this} class from the extended object
To be overloaded by child classes
@note: do not call the parent class C{teardown()} here, that is
already taken care of by C{destroy()}
'''
self.disconnect_all()
class WindowExtension(ObjectExtension):
def __init__(self, plugin, window):
ObjectExtension.__init__(self, plugin, window)
self.window = window
if hasattr(window, 'ui') and hasattr(window.ui, 'uistate') and window.ui.uistate: # XXX
self.uistate = window.ui.uistate[plugin.config_key]
else:
self.uistate = None
if hasattr(self, 'uimanager_xml'):
# XXX TODO move uimanager to window
actiongroup = get_gtk_actiongroup(self)
self.window.ui.uimanager.insert_action_group(actiongroup, 0)
self._uimanager_id = self.window.ui.uimanager.add_ui_from_string(self.uimanager_xml)
self.connectto(window, 'destroy')
def on_destroy(self, window):
self.destroy()
def teardown(self):
# TODO move uimanager to window
if hasattr(self, '_uimanager_id') \
and self._uimanager_id is not None:
self.window.ui.uimanager.remove_ui(self._uimanager_id)
self._uimanager_id = None
if hasattr(self, 'actiongroup') \
and self.actiongroup is not None:
self.window.ui.uimanager.remove_action_group(self.actiongroup)
class DialogExtension(WindowExtension):
def __init__(self, plugin, window):
assert hasattr(window, 'action_area'), 'Not a dialog: %s' % window
WindowExtension.__init__(self, plugin, window)
self._dialog_buttons = []
def add_dialog_button(self, button):
# This logic adds the button to the action area and places
# it left of the left most primary button by reshuffling all
# other buttons after adding the new one
#
# TODO: check if this works correctly in RTL configuration
self.window.action_area.pack_end(button, False) # puts button in right most position
self._dialog_buttons.append(button)
buttons = [b for b in self.window.action_area.get_children()
if not self.window.action_area.child_get_property(b, 'secondary')]
for b in buttons:
if b is not button:
self.window.action_area.reorder_child(b, -1) # reshuffle to the right
def teardown(self):
for b in self._dialog_buttons:
self.window.action_area.remove(b)
| gdw2/zim | zim/plugins/__init__.py | Python | gpl-2.0 | 19,510 |
# Copyright (c) 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import mock
from mock import patch
from neutron_lib.db import api as db_api
from oslo_config import cfg
from neutron.db.models.plugins.ml2 import vlanallocation
from neutron.tests.unit import testlib_api
from networking_arista.ml2.drivers.driver_helpers import VlanSyncService
from networking_arista.ml2.drivers.type_arista_vlan import AristaVlanTypeDriver
import networking_arista.tests.unit.ml2.utils as utils
EAPI_SEND_FUNC = ('networking_arista.ml2.rpc.arista_eapi.AristaRPCWrapperEapi'
'._send_eapi_req')
class AristaTypeDriverTest(testlib_api.SqlTestCase):
def setUp(self):
super(AristaTypeDriverTest, self).setUp()
utils.setup_arista_wrapper_config(cfg)
@patch(EAPI_SEND_FUNC)
def test_initialize_type_driver(self, mock_send_eapi_req):
type_driver = AristaVlanTypeDriver()
type_driver.sync_service._force_sync = False
type_driver.sync_service._vlan_assignment_uuid = {'uuid': 1}
type_driver.sync_service._rpc = mock.MagicMock()
rpc = type_driver.sync_service._rpc
rpc.get_vlan_assignment_uuid.return_value = {'uuid': 1}
type_driver.initialize()
cmds = ['show openstack agent uuid',
'show openstack instances',
'show openstack agent uuid',
'show openstack features']
calls = [mock.call(cmds=[cmd], commands_to_log=[cmd])
for cmd in cmds]
mock_send_eapi_req.assert_has_calls(calls)
type_driver.timer.cancel()
class VlanSyncServiceTest(testlib_api.SqlTestCase):
"""Test that VLANs are synchronized between EOS and Neutron."""
def _ensure_in_db(self, assigned, allocated, available):
session = db_api.get_reader_session()
with session.begin():
vlans = session.query(vlanallocation.VlanAllocation).all()
for vlan in vlans:
self.assertIn(vlan.vlan_id, assigned)
if vlan.vlan_id in available:
self.assertFalse(vlan.allocated)
elif vlan.vlan_id in allocated:
self.assertTrue(vlan.allocated)
def test_synchronization_test(self):
rpc = mock.MagicMock()
rpc.get_vlan_allocation.return_value = {
'assignedVlans': '1-10,21-30',
'availableVlans': '1-5,21,23,25,27,29',
'allocatedVlans': '6-10,22,24,26,28,30'
}
assigned = list(itertools.chain(range(1, 11), range(21, 31)))
available = [1, 2, 3, 4, 5, 21, 23, 25, 27, 29]
allocated = list(set(assigned) - set(available))
sync_service = VlanSyncService(rpc)
sync_service.synchronize()
self._ensure_in_db(assigned, allocated, available)
# Call synchronize again which returns different data
rpc.get_vlan_allocation.return_value = {
'assignedVlans': '51-60,71-80',
'availableVlans': '51-55,71,73,75,77,79',
'allocatedVlans': '56-60,72,74,76,78,80'
}
assigned = list(itertools.chain(range(51, 61), range(71, 81)))
available = [51, 52, 53, 54, 55, 71, 73, 75, 77, 79]
allocated = list(set(assigned) - set(available))
sync_service = VlanSyncService(rpc)
sync_service.synchronize()
self._ensure_in_db(assigned, allocated, available)
| openstack/networking-arista | networking_arista/tests/unit/ml2/type_drivers/test_arista_type_driver.py | Python | apache-2.0 | 3,956 |
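The mocked EOS replies above describe VLAN sets with range strings such as '1-10,21-30'. A small standalone helper (an illustration, not part of the driver) that expands such a string and reproduces the assigned/available/allocated arithmetic used in the test:

def expand_vlan_ranges(spec):
    """Expand an EOS-style VLAN range string, e.g. '1-10,21-30' -> [1..10, 21..30]."""
    vlans = set()
    for part in spec.split(','):
        part = part.strip()
        if not part:
            continue
        if '-' in part:
            start, end = part.split('-', 1)
            vlans.update(range(int(start), int(end) + 1))
        else:
            vlans.add(int(part))
    return sorted(vlans)

if __name__ == '__main__':
    assigned = expand_vlan_ranges('1-10,21-30')
    available = expand_vlan_ranges('1-5,21,23,25,27,29')
    allocated = sorted(set(assigned) - set(available))
    print(allocated)  # -> [6, 7, 8, 9, 10, 22, 24, 26, 28, 30], i.e. '6-10,22,24,26,28,30'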
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from oslo_serialization import jsonutils
import uuid
from kuryr.lib import constants as lib_const
from kuryr.lib import utils as lib_utils
from kuryr_libnetwork import config
from kuryr_libnetwork import constants as const
from kuryr_libnetwork.controllers import app
from kuryr_libnetwork.tests.unit import base
FAKE_IP4_CIDR = '10.0.0.0/16'
@ddt.ddt
class TestKuryrIpam(base.TestKuryrBase):
"""Basic unit tests for libnetwork remote IPAM driver URI endpoints.
This test class covers the following HTTP methods and URIs as described in
the remote IPAM driver specification as below:
https://github.com/docker/libnetwork/blob/9bf339f27e9f5c7c922036706c9bcc410899f249/docs/ipam.md # noqa
- POST /IpamDriver.GetDefaultAddressSpaces
- POST /IpamDriver.RequestPool
- POST /IpamDriver.ReleasePool
- POST /IpamDriver.RequestAddress
- POST /IpamDriver.ReleaseAddress
"""
@ddt.data(
('/IpamDriver.GetDefaultAddressSpaces',
{"LocalDefaultAddressSpace":
config.CONF.local_default_address_space,
"GlobalDefaultAddressSpace":
config.CONF.global_default_address_space}),
('/IpamDriver.GetCapabilities',
{"RequiresMACAddress": False}))
@ddt.unpack
def test_remote_ipam_driver_endpoint(self, endpoint, expected):
response = self.app.post(endpoint)
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(expected, decoded_json)
def test_ipam_driver_request_pool_with_user_pool(self):
fake_subnet = {"subnets": []}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet)
pool_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
new_subnetpool = {
'name': pool_name,
'default_prefixlen': 16,
'prefixes': [FAKE_IP4_CIDR]}
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = pool_name
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(name=fake_name).AndReturn(
{'subnetpools': []})
fake_subnetpool_response = {
'subnetpool': kuryr_subnetpools['subnetpools'][0]
}
self.mox.StubOutWithMock(app.neutron, 'create_subnetpool')
app.neutron.create_subnetpool(
{'subnetpool': new_subnetpool}).AndReturn(fake_subnetpool_response)
self.mox.ReplayAll()
fake_request = {
'AddressSpace': '',
'Pool': FAKE_IP4_CIDR,
'SubPool': '', # In the case --ip-range is not given
'Options': {},
'V6': False
}
response = self.app.post('/IpamDriver.RequestPool',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(fake_kuryr_subnetpool_id, decoded_json['PoolID'])
def test_ipam_driver_request_pool_with_pool_name_option(self):
fake_subnet = {"subnets": []}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet)
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = 'fake_pool_name'
new_subnetpool = {
'name': fake_name,
'default_prefixlen': 16,
'prefixes': [FAKE_IP4_CIDR]}
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
fake_subnetpool_response = {
'subnetpool': kuryr_subnetpools['subnetpools'][0]
}
self.mox.StubOutWithMock(app.neutron, 'create_subnetpool')
app.neutron.create_subnetpool(
{'subnetpool': new_subnetpool}).AndReturn(fake_subnetpool_response)
self.mox.ReplayAll()
fake_request = {
'AddressSpace': '',
'Pool': FAKE_IP4_CIDR,
'SubPool': '', # In the case --ip-range is not given
'Options': {'neutron.pool.name': 'fake_pool_name'},
'V6': False
}
response = self.app.post('/IpamDriver.RequestPool',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(fake_kuryr_subnetpool_id, decoded_json['PoolID'])
def test_ipam_driver_request_pool_with_default_v6pool(self):
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = 'kuryr6'
kuryr_subnetpools = self._get_fake_v6_subnetpools(
fake_kuryr_subnetpool_id, prefixes=['fe80::/64'])
app.neutron.list_subnetpools(name=fake_name).AndReturn(
{'subnetpools': kuryr_subnetpools['subnetpools']})
self.mox.ReplayAll()
fake_request = {
'AddressSpace': '',
'Pool': '',
'SubPool': '', # In the case --ip-range is not given
'Options': {},
'V6': True
}
response = self.app.post('/IpamDriver.RequestPool',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(fake_kuryr_subnetpool_id, decoded_json['PoolID'])
def test_ipam_driver_release_pool(self):
fake_kuryr_subnetpool_id = str(uuid.uuid4())
self.mox.StubOutWithMock(app.neutron, 'delete_subnetpool')
app.neutron.delete_subnetpool(fake_kuryr_subnetpool_id).AndReturn(
{})
self.mox.ReplayAll()
fake_request = {
'PoolID': fake_kuryr_subnetpool_id
}
response = self.app.post('/IpamDriver.ReleasePool',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
def test_ipam_driver_request_address(self):
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = str(uuid.uuid4())
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
neutron_network_id, docker_endpoint_id, subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
# faking create_port
fake_neutron_port_id = str(uuid.uuid4())
fake_port = base.TestKuryrBase._get_fake_port(
docker_endpoint_id, neutron_network_id,
fake_neutron_port_id, lib_const.PORT_STATUS_ACTIVE,
subnet_v4_id,
neutron_subnet_v4_address="10.0.0.5")
port_request = {
'name': 'kuryr-unbound-port',
'admin_state_up': True,
'network_id': neutron_network_id,
'binding:host_id': lib_utils.get_hostname(),
}
fixed_ips = port_request['fixed_ips'] = []
fixed_ip = {'subnet_id': subnet_v4_id}
fixed_ips.append(fixed_ip)
self.mox.StubOutWithMock(app.neutron, 'create_port')
app.neutron.create_port({'port': port_request}).AndReturn(fake_port)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': '', # Querying for container address
'Options': {}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual('10.0.0.5/16', decoded_json['Address'])
def test_ipam_driver_request_address_when_subnet_not_exist(self):
requested_address = '10.0.0.5'
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
fake_subnet_response = {'subnets': []}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': requested_address,
'Options': {}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(requested_address + '/16', decoded_json['Address'])
@ddt.data((False), (True))
def test_ipam_driver_request_specific_address(self, existing_port):
requested_address = '10.0.0.5'
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = str(uuid.uuid4())
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
neutron_network_id, docker_endpoint_id, subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
self.mox.StubOutWithMock(app.neutron, 'list_ports')
# faking update_port or create_port
fake_neutron_port_id = str(uuid.uuid4())
fake_port = base.TestKuryrBase._get_fake_port(
docker_endpoint_id, neutron_network_id,
fake_neutron_port_id, lib_const.PORT_STATUS_ACTIVE,
subnet_v4_id,
neutron_subnet_v4_address=requested_address)
fixed_ip_existing = [('subnet_id=%s' % subnet_v4_id)]
if existing_port:
fake_existing_port = fake_port['port']
fake_existing_port['binding:host_id'] = ''
fake_existing_port['binding:vif_type'] = 'unbound'
fake_ports_response = {'ports': [fake_existing_port]}
else:
fake_ports_response = {'ports': []}
fixed_ip_existing.append('ip_address=%s' % requested_address)
app.neutron.list_ports(fixed_ips=fixed_ip_existing).AndReturn(
fake_ports_response)
if existing_port:
update_port = {
'admin_state_up': True,
'binding:host_id': lib_utils.get_hostname(),
}
self.mox.StubOutWithMock(app.neutron, 'update_port')
app.neutron.update_port(fake_neutron_port_id,
{'port': update_port}).AndReturn(
fake_port)
else:
port_request = {
'name': 'kuryr-unbound-port',
'admin_state_up': True,
'network_id': neutron_network_id,
'binding:host_id': lib_utils.get_hostname(),
}
fixed_ips = port_request['fixed_ips'] = []
fixed_ip = {'subnet_id': subnet_v4_id,
'ip_address': requested_address}
fixed_ips.append(fixed_ip)
self.mox.StubOutWithMock(app.neutron, 'create_port')
app.neutron.create_port({'port': port_request}).AndReturn(
fake_port)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': requested_address,
'Options': {}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual(requested_address + '/16', decoded_json['Address'])
def test_ipam_driver_request_address_overlapping_cidr(self):
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_kuryr_subnetpool_id2 = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = str(uuid.uuid4())
neutron_network_id2 = str(uuid.uuid4())
neutron_subnet_v4_id = str(uuid.uuid4())
neutron_subnet_v4_id2 = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
neutron_network_id, docker_endpoint_id, neutron_subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_v4_subnet2 = self._get_fake_v4_subnet(
neutron_network_id2, docker_endpoint_id, neutron_subnet_v4_id2,
subnetpool_id=fake_kuryr_subnetpool_id2,
cidr=FAKE_IP4_CIDR)
fake_subnet_response = {
'subnets': [
fake_v4_subnet2['subnet'],
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
# faking create_port
fake_neutron_port_id = str(uuid.uuid4())
fake_port = base.TestKuryrBase._get_fake_port(
docker_endpoint_id, neutron_network_id,
fake_neutron_port_id,
neutron_subnet_v4_id=neutron_subnet_v4_id,
neutron_subnet_v4_address="10.0.0.5")
port_request = {
'name': 'kuryr-unbound-port',
'admin_state_up': True,
'network_id': neutron_network_id,
'binding:host_id': lib_utils.get_hostname(),
}
port_request['fixed_ips'] = []
fixed_ip = {'subnet_id': neutron_subnet_v4_id}
port_request['fixed_ips'].append(fixed_ip)
self.mox.StubOutWithMock(app.neutron, 'create_port')
app.neutron.create_port({'port': port_request}).AndReturn(fake_port)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': '', # Querying for container address
'Options': {}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual('10.0.0.5/16', decoded_json['Address'])
def test_ipam_driver_request_address_for_same_gateway(self):
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = str(uuid.uuid4())
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
neutron_network_id, docker_endpoint_id, subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_v4_subnet['subnet'].update(gateway_ip='10.0.0.1')
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': '10.0.0.1',
'Options': {
const.REQUEST_ADDRESS_TYPE: const.NETWORK_GATEWAY_OPTIONS
}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertEqual('10.0.0.1/16', decoded_json['Address'])
def test_ipam_driver_request_address_for_different_gateway(self):
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = lib_utils.get_neutron_subnetpool_name(FAKE_IP4_CIDR)
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR],
name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
# faking list_subnets
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = str(uuid.uuid4())
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
neutron_network_id, docker_endpoint_id, subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_v4_subnet['subnet'].update(gateway_ip='10.0.0.1')
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
# Apply mocks
self.mox.ReplayAll()
# Testing container ip allocation
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': '10.0.0.5', # Different with existed gw ip
'Options': {
const.REQUEST_ADDRESS_TYPE: const.NETWORK_GATEWAY_OPTIONS
}
}
response = self.app.post('/IpamDriver.RequestAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(500, response.status_code)
decoded_json = jsonutils.loads(response.data)
self.assertIn('Err', decoded_json)
err_message = ("Requested gateway {0} does not match with "
"gateway {1} in existed network.").format(
'10.0.0.5', '10.0.0.1')
self.assertEqual({'Err': err_message}, decoded_json)
def test_ipam_driver_release_address(self):
# faking list_subnetpools
self.mox.StubOutWithMock(app.neutron, 'list_subnetpools')
fake_kuryr_subnetpool_id = str(uuid.uuid4())
fake_name = str('-'.join(['kuryrPool', FAKE_IP4_CIDR]))
kuryr_subnetpools = self._get_fake_v4_subnetpools(
fake_kuryr_subnetpool_id, prefixes=[FAKE_IP4_CIDR], name=fake_name)
app.neutron.list_subnetpools(id=fake_kuryr_subnetpool_id).AndReturn(
kuryr_subnetpools)
fake_ip4 = '10.0.0.5'
# faking list_subnets
docker_network_id = lib_utils.get_hash()
docker_endpoint_id = lib_utils.get_hash()
neutron_network_id = docker_network_id = str(uuid.uuid4())
subnet_v4_id = str(uuid.uuid4())
fake_v4_subnet = self._get_fake_v4_subnet(
docker_network_id, docker_endpoint_id, subnet_v4_id,
subnetpool_id=fake_kuryr_subnetpool_id,
cidr=FAKE_IP4_CIDR)
fake_subnet_response = {
'subnets': [
fake_v4_subnet['subnet']
]
}
self.mox.StubOutWithMock(app.neutron, 'list_subnets')
app.neutron.list_subnets(cidr=FAKE_IP4_CIDR).AndReturn(
fake_subnet_response)
#faking list_ports and delete_port
fake_neutron_port_id = str(uuid.uuid4())
fake_port = base.TestKuryrBase._get_fake_port(
docker_endpoint_id, neutron_network_id,
fake_neutron_port_id, lib_const.PORT_STATUS_ACTIVE,
subnet_v4_id,
neutron_subnet_v4_address=fake_ip4)
port_request = {
'name': 'demo-port',
'admin_state_up': True,
'network_id': neutron_network_id,
}
rel_fixed_ips = port_request['fixed_ips'] = []
fixed_ip = {'subnet_id': subnet_v4_id}
fixed_ip['ip_address'] = fake_ip4
rel_fixed_ips.append(fixed_ip)
self.mox.StubOutWithMock(app.neutron, 'list_ports')
list_port_response = {'ports': [fake_port['port']]}
app.neutron.list_ports().AndReturn(
list_port_response)
self.mox.StubOutWithMock(app.neutron, 'delete_port')
app.neutron.delete_port(fake_port['port']['id']).AndReturn({})
# Apply mocks
self.mox.ReplayAll()
fake_request = {
'PoolID': fake_kuryr_subnetpool_id,
'Address': fake_ip4
}
response = self.app.post('/IpamDriver.ReleaseAddress',
content_type='application/json',
data=jsonutils.dumps(fake_request))
self.assertEqual(200, response.status_code)
| celebdor/kuryr-libnetwork | kuryr_libnetwork/tests/unit/test_kuryr_ipam.py | Python | apache-2.0 | 25,176 |
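For orientation, a sketch of the /IpamDriver.RequestPool round trip these tests exercise, written as plain dictionaries; the field names mirror the fake requests above, and the UUID is purely illustrative:

import json

# body POSTed by libnetwork, as built in the tests above
request_pool = {
    'AddressSpace': '',
    'Pool': '10.0.0.0/16',   # CIDR passed with --subnet
    'SubPool': '',           # empty when --ip-range is not given
    'Options': {'neutron.pool.name': 'fake_pool_name'},
    'V6': False,
}

# the tests only assert the PoolID field of the reply: the id of the
# Neutron subnetpool backing the requested pool
response_pool = {
    'PoolID': '4a1a2a44-9d6c-4c86-b5f8-31c4ad9e68a7',  # illustrative UUID
}

print(json.dumps(request_pool, indent=2))
print(json.dumps(response_pool, indent=2))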
import logging
from django.contrib import messages
from django.contrib.auth import authenticate
from django.core.urlresolvers import reverse
from django.http.response import Http404, HttpResponseRedirect
from django.shortcuts import render, redirect, render_to_response
# Create your views here.
from django.template.context import RequestContext
from rest_framework.authtoken.models import Token
from api.models import App
from ui.forms import LoginForm
log = logging.getLogger(__name__)
def login(request):
# if request.user.is_authenticated():
# return redirect('/')
callback = request.GET.get('callback', '')
if not callback.endswith("/"):
callback=callback+"/"
log.debug("callback %s",callback)
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
password = form.cleaned_data['password']
user = authenticate(username=username, password=password)
if user is not None:
auth_app = user.crowduser.auth_apps.all()
try:
app = App.objects.get(callback=callback)
except Exception:
raise Http404
token = Token.objects.get(user=user)
if app not in auth_app:
log.debug("not in app")
return redirect(reverse(auth)+"?callback="+callback+"&token="+token.key)
else:
log.debug("in app")
# log.debug("Username %s",user.username)
# get the app
# apptoken = request.META.get('HTTP_AUTHORIZATION', b'')
                    callback = request.GET.get('callback', '')
                    if not callback:
                        # no callback supplied, nowhere to send the token back to
                        raise Http404
token = Token.objects.get(user=user)
redirect_to = callback+"?token="+token.key
return HttpResponseRedirect(redirect_to)
else:
                messages.info(request, 'username and password not valid')
                form.helper.form_action = reverse('login') + '?callback=' + callback
                return render_to_response('ui/login.html', {'form': form}, context_instance=RequestContext(request))
else:
form.helper.form_action = reverse('login') + '?callback=' + callback
            return render_to_response('ui/login.html', {'form': form}, context_instance=RequestContext(request))
else:
form = LoginForm()
form.helper.form_action = reverse('login') + '?callback=' + callback
# context = {'form': form,'callback':callback}
# context = {}
return render_to_response('ui/login.html', {'form': form}, context_instance=RequestContext(request))
def auth(request):
callback = request.GET.get('callback', '')
token = request.GET.get('token', '')
if not callback.endswith("/"):
callback=callback+"/"
if request.method == 'POST':
token = Token.objects.get(key=token)
app = App.objects.get(callback=callback)
crowduser = token.user.crowduser
crowduser.auth_apps.add(app)
crowduser.save()
        redirect_to = callback + "?token=" + token.key + "&id=" + str(crowduser.user.pk)
return HttpResponseRedirect(redirect_to)
else:
app = App.objects.get(callback=callback)
return render_to_response('ui/app.html', {'app': app,'callback':callback,'token':token}, context_instance=RequestContext(request))
| Crowdcomputer/CroCoAPI | ui/views.py | Python | gpl-2.0 | 3,534 |
#!/usr/bin/env python
#
# This script changes a SASLDB2 realm to another one.
#
# Written by Sander Steffann <[email protected]>
# No rights reserved: do with this script as you please.
#
# Usage: change-sasldb2-realm.py <orig-realm> <new-realm>
# where <orig-realm> and <new-realm> are case-sensitive.
#
# !WARNING! This script opens /etc/sasldb2 directly, without going through
# the official API. If the file format changes this script breaks.
#
# The following file-format for sasldb2 is assumed: a BSD-DB hash-file with a
# key in the format "<data> \x00 <realm> \x00 <data>". The <data> parts are
# copied without modification. The values corresponding to the keys are copied
# without modification too.
#
# To be safe, this script opens /etc/sasldb2 read-only and writes the result
# to a new file: /etc/sasldb2.new. If this file exists this script will
# overwrite it. Don't overwrite your /etc/sasldb2 file until you are sure the
# results in /etc/sasldb2.new are what you want.
#
# This script uses the bsddb module provided with python. It is assumed that
# the DB library used by python is the same as the one used by SASL2. If this
# is not the case the script will abort with an error when opening sasldb2.
#
import bsddb
import sys
# Check command-line arguments
if len(sys.argv) != 3:
print "Usage: %s <orig-realm> <new-realm>" % (sys.argv[0],)
sys.exit(1)
# Extract the command-line arguments into properly named variables
orig_realm = sys.argv[1]
new_realm = sys.argv[2]
# Open the BSD-DB files
orig_db = bsddb.hashopen('/etc/sasldb2', 'r')
new_db = bsddb.hashopen('/etc/sasldb2.new', 'n')
# Loop over all the keys in the original sasldb2
for orig_key in orig_db.keys():
# Split the key into the three components
elements = orig_key.split('\x00')
if len(elements) != 3:
raise ValueError, "The structure of /etc/sasldb2 is not as expected!"
# Compare the current realm with the realm we want to replace
if elements[1] == orig_realm:
# Replace the current realm with the new one
new_key = '\x00'.join([elements[0], new_realm, elements[2]])
else:
# Wrong realm: Don't touch the key
new_key = orig_key
# Write the key with the corresponding value in the new DB
new_db[new_key] = orig_db[orig_key]
# Close the DB files
orig_db.close()
new_db.close()
| papyrussolution/OpenPapyrus | Src/OSF/cyrus-sasl/contrib/change-sasldb2-realm.py | Python | agpl-3.0 | 2,305 |
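A worked example of the key layout described in the header comment (a NUL-separated <data>, <realm>, <data> triple); the user and property names are invented for illustration:

orig_realm = 'OLD.EXAMPLE.COM'
new_realm = 'NEW.EXAMPLE.COM'

# sasldb2 key layout assumed above: <data> \x00 <realm> \x00 <data>
orig_key = 'alice\x00OLD.EXAMPLE.COM\x00userPassword'

elements = orig_key.split('\x00')
assert len(elements) == 3
if elements[1] == orig_realm:
    new_key = '\x00'.join([elements[0], new_realm, elements[2]])
else:
    new_key = orig_key

print(repr(new_key))  # 'alice\x00NEW.EXAMPLE.COM\x00userPassword'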
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from django.conf import settings
from uwcs_website.shortcuts import template_mail
class Command(BaseCommand):
help = "emails password change to user"
requires_model_validation = True
def handle(self, *args, **options):
'''
'''
for u in User.objects.all():
password = User.objects.make_random_password()
u.set_password(password)
u.save()
try:
template_mail(
'New Website Password',
'memberinfo/migration_email',
{'first': u.first_name, 'last':u.last_name, 'username':u.username, 'password':password},
settings.COMPSOC_TECHTEAM_EMAIL,
[u.email]
)
except Exception, e:
print u.username
print e
def usage(self, subcommand): pass
| UWCS/uwcs-website | uwcs_website/memberinfo/management/commands/email_all_passwords.py | Python | agpl-3.0 | 989 |
from __future__ import division
import numpy as np
from scipy import io
def dataload(filename):
data = np.load(filename)
#print(data)
return data
# Normalize each column to the range [0, 1] after taking the base-10 logarithm of the samples
def log_normalization(data):
colnum = len(data[0])
rownum = len(data)
#print data[0:len(data),0]
nordata = np.zeros((rownum, colnum))
for i in range(0, colnum):
tempi = np.log10(data[0:rownum,i])
tempi = (tempi - np.min(tempi)) / (np.max(tempi) - np.min(tempi))
nordata[0:rownum,i] = tempi
return nordata
def normalization(data):
colnum = len(data[0])
rownum = len(data)
nordata = np.zeros((rownum, colnum))
for i in range(0, colnum):
nordata[0:rownum,i] = (data[0:rownum,i] - np.min(data[0:rownum,i])) / (np.max(data[0:rownum,i]) - np.min(data[0:rownum,i]))
return nordata
# Convert a variable from a .mat data file to a .npy file
def mattonpz(srcname,srckey,outname):
in_d = io.loadmat(srcname)
oct_a = in_d[srckey]
oct_a = float(oct_a)
np.save(outname,oct_a)
if __name__ == '__main__':
print log_normalization(dataload('in_data.npy'))
| evanrao/NNReserve | fwnn/matstorage.py | Python | apache-2.0 | 1,189 |
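A small standalone check of the log normalization above: each column is log10-transformed, then min-max scaled to [0, 1] (the input must be strictly positive for log10 to be defined):

import numpy as np

data = np.array([[1.0, 10.0],
                 [10.0, 100.0],
                 [100.0, 1000.0]])

logged = np.log10(data)                       # column-wise log10
mins, maxs = logged.min(axis=0), logged.max(axis=0)
normalized = (logged - mins) / (maxs - mins)  # min-max scale per column

print(normalized)
# each column becomes [0.0, 0.5, 1.0]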
#!/usr/bin/env python
"""
Copyright (c) 2014-2022 Maltrail developers (https://github.com/stamparm/maltrail/)
See the file 'LICENSE' for copying permission
"""
import re
from core.common import retrieve_content
__url__ = "https://github.com/JR0driguezB/malware_configs"
__check__ = "mcconf"
__info__ = "trickbot (malware)"
__reference__ = "github.com/JR0driguezB"
def fetch():
retval = {}
content = retrieve_content("https://github.com/JR0driguezB/malware_configs/tree/master/TrickBot/mcconf_files")
if __check__ in content:
last = re.findall(r"config.conf_\d+.xml", content)[-1]
content = retrieve_content("https://raw.githubusercontent.com/JR0driguezB/malware_configs/master/TrickBot/mcconf_files/%s" % last)
if __check__ in content:
for match in re.finditer(r"<srv>([\d.]+)", content):
retval[match.group(1)] = (__info__, __reference__)
return retval
| stamparm/maltrail | trails/feeds/trickbot.py | Python | mit | 929 |
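A quick illustration of the <srv> extraction that fetch() performs; the XML fragment below is fabricated for the example, only the regular expression comes from the feed above:

import re

sample = """
<mcconf>
  <srvs>
    <srv>203.0.113.10:443</srv>
    <srv>198.51.100.7:449</srv>
  </srvs>
</mcconf>
"""

for match in re.finditer(r"<srv>([\d.]+)", sample):
    print(match.group(1))
# -> 203.0.113.10 and 198.51.100.7 (the port is not part of the capture group)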
from django.conf.urls import include, url
# Doorstep apps urls
urlpatterns = [
url(r'^', include('doorstep.catalog.urls')),
url(r'^accounts/', include('doorstep.accounts.urls')),
url(r'^sales/', include('doorstep.sales.urls')),
url(r'^payments/', include('doorstep.payments.urls')),
url(r'^pages/', include('doorstep.pages.urls'))
]
| mysteryjeans/doorsale-demo | doorstep/urls.py | Python | gpl-2.0 | 355 |
""" Common functions and logging setup.
"""
import sys
import os
import itertools
import time
import logging, logging.handlers
from spectrum.config import LOG_PATH, LOG_SIZE, LOG_LEVEL
def get_logger():
""" Get a logger based on the system path.
"""
logger = logging.getLogger('werkzeug') # use this name so flask doesn't use its own logger
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages (these end up in log file)
logger.filename = '{0}.log'.format(os.path.basename(sys.argv[0]).replace('.py', ''))
logger.path = os.path.join(LOG_PATH, logger.filename)
rf_handler = logging.handlers.RotatingFileHandler(logger.path, maxBytes=LOG_SIZE, backupCount=0)
rf_handler.setLevel(getattr(logging, LOG_LEVEL, logging.DEBUG))
# create console handler with a higher log level (these end up in system journal)
c_handler = logging.StreamHandler()
c_handler.setLevel(logging.DEBUG if 'debug' in sys.argv else logging.WARN)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
rf_handler.setFormatter(formatter)
c_handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(rf_handler)
logger.addHandler(c_handler)
logger.info("Obtained logger")
return logger
log = get_logger() # pylint: disable=invalid-name
class FakeLogger(object):
""" Fake logger that does nothing.
"""
def error(self, *_): # pylint: disable=missing-docstring
pass
def warn(self, *_): # pylint: disable=missing-docstring
pass
def info(self, *_): # pylint: disable=missing-docstring
pass
def debug(self, *_): # pylint: disable=missing-docstring
pass
def now():
""" Return time in milliseconds since the epoch.
"""
return int(time.time() * 1000)
def freq(freq_n, scan_config): # pylint: disable=redefined-builtin
""" Return the frequency for the given freq_n. Use of this is fairly inefficient
because the whole range of frequencies is generated each time.
"""
return next(itertools.islice(scan(scan_config), freq_n, None))[1]
def _convert(dic):
""" Auto-convert empty strings into None, number strings into numbers, and boolean
strings into booleans. Recurse into dictionaries.
"""
for k, v in dic.iteritems():
if isinstance(v, dict):
_convert(v)
if not isinstance(v, basestring):
continue
v = v.strip()
if v == '':
dic[k] = None
continue
if v.lower() == 'true':
dic[k] = True
continue
if v.lower() == 'false':
dic[k] = False
continue
try:
dic[k] = int(v)
continue
except (ValueError, TypeError):
pass
try:
dic[k] = float(v)
continue
except (ValueError, TypeError):
pass
#FIXME prefer to do the interpretation of freq specs, to produce a generator, in one step
def parse_config(config, worker):
""" Convert the given config using _convert, and return parsed scan settings.
The return value may be fed into scan().
"""
_convert(config)
scan_cfg = []
if worker in config and 'freqs' in config[worker]:
for x in config[worker]['freqs']:
# x is either a range or a single frequency
if 'range' in x and x.get('enabled', False):
scan_cfg.append([int(10 ** x['exp'] * float(f)) for f in x['range']])
scan_cfg[-1][1] += scan_cfg[-1][2] / 2 # ensure to include the end of the range
elif 'freq' in x and x.get('enabled', False):
scan_cfg.append(int(10 ** int(x['exp']) * float(x['freq'])))
return scan_cfg
#FIXME prefer to do the interpretation of freq specs, to produce a generator, in one step
def scan(scan_config): # pylint: disable=redefined-builtin
""" Iterate frequency indices and frequency values in the specified scan config.
"""
idx = 0
for freq in itertools.chain(*[xrange(*x) if isinstance(x, list) else [x] for x in scan_config]):
yield idx, freq
idx += 1
def fs_size(path):
""" Return file system usage at the given path.
"""
result = os.popen('du -sk {0}'.format(path)).read()
try:
return int(result.split('\t')[0]) * 1024
except ValueError:
return 0
def fs_free(path):
""" Return file system space free for the volume containing the given path.
"""
result = os.popen('df -k {0}'.format(path)).read()
try:
return int(result.split('\n')[1].split()[3]) * 1024
except ValueError:
return 0
def check_device(value):
""" Check that the input specifies a valid device path, and return that path.
"""
if os.path.basename(value) != value:
raise Exception("Bad device specifier: {0}".format(value))
return '/dev/{0}'.format(value)
def psm_name():
""" Return the box name.
"""
return os.popen('uname -n').read().strip()
def mkdirs(file_path):
""" Ensure parent directories for the given file path exist (creating them
if not).
"""
path = os.path.dirname(file_path)
if not os.path.exists(path):
os.makedirs(path)
| jennyb/piSpecMon | spectrum/common.py | Python | gpl-2.0 | 5,355 |
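A hedged usage sketch of the frequency-spec handling above. For an enabled range entry {'range': [88.0, 88.4, 0.2], 'exp': 6} plus a single frequency {'freq': 433.92, 'exp': 6}, parse_config() would produce roughly the scan config below (the range end already includes the extra half step), and scan() then yields (index, frequency) pairs; a simplified copy of scan() (using range) is inlined so the snippet runs on its own:

import itertools

scan_cfg = [[88000000, 88500000, 200000], 433920000]

def scan(scan_config):
    idx = 0
    for freq in itertools.chain(*[range(*x) if isinstance(x, list) else [x]
                                  for x in scan_config]):
        yield idx, freq
        idx += 1

for freq_n, freq in scan(scan_cfg):
    print("%d %d" % (freq_n, freq))
# 0 88000000 / 1 88200000 / 2 88400000 / 3 433920000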
#!/usr/bin/env python
#
# Copyright 2015-2015 breakwa11
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
import binascii
import struct
import base64
import time
import random
import hmac
import hashlib
import string
from shadowsocks import common
from shadowsocks.obfsplugin import plain
from shadowsocks.common import to_bytes, to_str, ord
from shadowsocks import lru_cache
def create_tls_ticket_auth_obfs(method):
return tls_ticket_auth(method)
obfs_map = {
'tls1.2_ticket_auth': (create_tls_ticket_auth_obfs,),
'tls1.2_ticket_auth_compatible': (create_tls_ticket_auth_obfs,),
}
def match_begin(str1, str2):
if len(str1) >= len(str2):
if str1[:len(str2)] == str2:
return True
return False
class obfs_auth_data(object):
def __init__(self):
self.client_data = lru_cache.LRUCache(60 * 5)
self.client_id = os.urandom(32)
self.startup_time = int(time.time() - 60 * 30) & 0xFFFFFFFF
class tls_ticket_auth(plain.plain):
def __init__(self, method):
self.method = method
self.handshake_status = 0
self.send_buffer = b''
self.recv_buffer = b''
self.client_id = b''
self.max_time_dif = 0 # time dif (second) setting
self.tls_version = b'\x03\x03'
def init_data(self):
return obfs_auth_data()
def sni(self, url):
url = common.to_bytes(url)
data = b"\x00" + struct.pack('>H', len(url)) + url
data = b"\x00\x00" + struct.pack('>H', len(data) + 2) + struct.pack('>H', len(data)) + data
return data
def pack_auth_data(self, client_id):
utc_time = int(time.time()) & 0xFFFFFFFF
data = struct.pack('>I', utc_time) + os.urandom(18)
data += hmac.new(self.server_info.key + client_id, data, hashlib.sha1).digest()[:10]
return data
def client_encode(self, buf):
if self.handshake_status == -1:
return buf
if self.handshake_status == 8:
ret = b''
while len(buf) > 2048:
size = min(struct.unpack('>H', os.urandom(2))[0] % 4096 + 100, len(buf))
ret += b"\x17" + self.tls_version + struct.pack('>H', size) + buf[:size]
buf = buf[size:]
if len(buf) > 0:
ret += b"\x17" + self.tls_version + struct.pack('>H', len(buf)) + buf
return ret
self.send_buffer += b"\x17" + self.tls_version + struct.pack('>H', len(buf)) + buf
if self.handshake_status == 0:
self.handshake_status = 1
data = self.tls_version + self.pack_auth_data(self.server_info.data.client_id) + b"\x20" + self.server_info.data.client_id + binascii.unhexlify(b"001cc02bc02fcca9cca8cc14cc13c00ac014c009c013009c0035002f000a" + b"0100")
ext = binascii.unhexlify(b"ff01000100")
host = self.server_info.obfs_param or self.server_info.host
if host and host[-1] in string.digits:
host = ''
hosts = host.split(',')
host = random.choice(hosts)
ext += self.sni(host)
ext += b"\x00\x17\x00\x00"
ext += b"\x00\x23\x00\xd0" + os.urandom(208) # ticket
ext += binascii.unhexlify(b"000d001600140601060305010503040104030301030302010203")
ext += binascii.unhexlify(b"000500050100000000")
ext += binascii.unhexlify(b"00120000")
ext += binascii.unhexlify(b"75500000")
ext += binascii.unhexlify(b"000b00020100")
ext += binascii.unhexlify(b"000a0006000400170018")
data += struct.pack('>H', len(ext)) + ext
data = b"\x01\x00" + struct.pack('>H', len(data)) + data
data = b"\x16\x03\x01" + struct.pack('>H', len(data)) + data
return data
elif self.handshake_status == 1 and len(buf) == 0:
data = b"\x14" + self.tls_version + b"\x00\x01\x01" #ChangeCipherSpec
data += b"\x16" + self.tls_version + b"\x00\x20" + os.urandom(22) #Finished
data += hmac.new(self.server_info.key + self.server_info.data.client_id, data, hashlib.sha1).digest()[:10]
ret = data + self.send_buffer
self.send_buffer = b''
self.handshake_status = 8
return ret
return b''
def client_decode(self, buf):
if self.handshake_status == -1:
return (buf, False)
if self.handshake_status == 8:
ret = b''
self.recv_buffer += buf
while len(self.recv_buffer) > 5:
if ord(self.recv_buffer[0]) != 0x17:
logging.info("data = %s" % (binascii.hexlify(self.recv_buffer)))
raise Exception('server_decode appdata error')
size = struct.unpack('>H', self.recv_buffer[3:5])[0]
if len(self.recv_buffer) < size + 5:
break
buf = self.recv_buffer[5:size+5]
ret += buf
self.recv_buffer = self.recv_buffer[size+5:]
return (ret, False)
if len(buf) < 11 + 32 + 1 + 32:
raise Exception('client_decode data error')
verify = buf[11:33]
if hmac.new(self.server_info.key + self.server_info.data.client_id, verify, hashlib.sha1).digest()[:10] != buf[33:43]:
raise Exception('client_decode data error')
return (b'', True)
def server_encode(self, buf):
if self.handshake_status == -1:
return buf
if self.handshake_status == 8:
ret = b''
while len(buf) > 2048:
size = min(struct.unpack('>H', os.urandom(2))[0] % 4096 + 100, len(buf))
ret += b"\x17" + self.tls_version + struct.pack('>H', size) + buf[:size]
buf = buf[size:]
if len(buf) > 0:
ret += b"\x17" + self.tls_version + struct.pack('>H', len(buf)) + buf
return ret
self.handshake_status = 3
data = self.tls_version + self.pack_auth_data(self.client_id) + b"\x20" + self.client_id + binascii.unhexlify(b"c02f000005ff01000100")
data = b"\x02\x00" + struct.pack('>H', len(data)) + data #server hello
data = b"\x16\x03\x03" + struct.pack('>H', len(data)) + data
data += b"\x14" + self.tls_version + b"\x00\x01\x01" #ChangeCipherSpec
data += b"\x16" + self.tls_version + b"\x00\x20" + os.urandom(22) #Finished
data += hmac.new(self.server_info.key + self.client_id, data, hashlib.sha1).digest()[:10]
return data
def decode_error_return(self, buf):
self.handshake_status = -1
if self.method == 'tls1.2_ticket_auth':
return (b'E'*2048, False, False)
return (buf, True, False)
def server_decode(self, buf):
if self.handshake_status == -1:
return (buf, True, False)
if self.handshake_status == 8:
ret = b''
self.recv_buffer += buf
while len(self.recv_buffer) > 5:
if ord(self.recv_buffer[0]) != 0x17 or ord(self.recv_buffer[1]) != 0x3 or ord(self.recv_buffer[2]) != 0x3:
logging.info("data = %s" % (binascii.hexlify(self.recv_buffer)))
raise Exception('server_decode appdata error')
size = struct.unpack('>H', self.recv_buffer[3:5])[0]
if len(self.recv_buffer) < size + 5:
break
ret += self.recv_buffer[5:size+5]
self.recv_buffer = self.recv_buffer[size+5:]
return (ret, True, False)
if self.handshake_status == 3:
verify = buf
verify_len = 43 - 10
if len(buf) < 43:
raise Exception('server_decode data error')
if not match_begin(buf, b"\x14" + self.tls_version + b"\x00\x01\x01"): #ChangeCipherSpec
raise Exception('server_decode data error')
buf = buf[6:]
if not match_begin(buf, b"\x16" + self.tls_version + b"\x00\x20"): #Finished
raise Exception('server_decode data error')
if hmac.new(self.server_info.key + self.client_id, verify[:verify_len], hashlib.sha1).digest()[:10] != verify[verify_len:verify_len+10]:
raise Exception('server_decode data error')
if len(buf) < 37:
raise Exception('server_decode data error')
self.recv_buffer = buf[37:]
self.handshake_status = 8
return self.server_decode(b'')
#raise Exception("handshake data = %s" % (binascii.hexlify(buf)))
self.handshake_status = 2
ogn_buf = buf
if not match_begin(buf, b'\x16\x03\x01'):
return self.decode_error_return(ogn_buf)
buf = buf[3:]
if struct.unpack('>H', buf[:2])[0] != len(buf) - 2:
logging.info("tls_auth wrong tls head size")
return self.decode_error_return(ogn_buf)
buf = buf[2:]
if not match_begin(buf, b'\x01\x00'): #client hello
logging.info("tls_auth not client hello message")
return self.decode_error_return(ogn_buf)
buf = buf[2:]
if struct.unpack('>H', buf[:2])[0] != len(buf) - 2:
logging.info("tls_auth wrong message size")
return self.decode_error_return(ogn_buf)
buf = buf[2:]
if not match_begin(buf, self.tls_version):
logging.info("tls_auth wrong tls version")
return self.decode_error_return(ogn_buf)
buf = buf[2:]
verifyid = buf[:32]
buf = buf[32:]
sessionid_len = ord(buf[0])
if sessionid_len < 32:
logging.info("tls_auth wrong sessionid_len")
return self.decode_error_return(ogn_buf)
sessionid = buf[1:sessionid_len + 1]
buf = buf[sessionid_len+1:]
self.client_id = sessionid
sha1 = hmac.new(self.server_info.key + sessionid, verifyid[:22], hashlib.sha1).digest()[:10]
utc_time = struct.unpack('>I', verifyid[:4])[0]
time_dif = common.int32((int(time.time()) & 0xffffffff) - utc_time)
if self.server_info.obfs_param:
try:
self.max_time_dif = int(self.server_info.obfs_param)
except:
pass
if self.max_time_dif > 0 and (time_dif < -self.max_time_dif or time_dif > self.max_time_dif \
or common.int32(utc_time - self.server_info.data.startup_time) < -self.max_time_dif / 2):
logging.info("tls_auth wrong time")
return self.decode_error_return(ogn_buf)
if sha1 != verifyid[22:]:
logging.info("tls_auth wrong sha1")
return self.decode_error_return(ogn_buf)
if self.server_info.data.client_data.get(verifyid[:22]):
logging.info("replay attack detect, id = %s" % (binascii.hexlify(verifyid)))
return self.decode_error_return(ogn_buf)
self.server_info.data.client_data.sweep()
self.server_info.data.client_data[verifyid[:22]] = sessionid
# (buffer_to_recv, is_need_decrypt, is_need_to_encode_and_send_back)
return (b'', False, True)
| xieshenglin/shadowsocks | shadowsocks/obfsplugin/obfs_tls.py | Python | apache-2.0 | 11,811 |
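A standalone sketch of the 32-byte auth blob built by pack_auth_data() above and checked again in server_decode(): a 4-byte UTC timestamp, 18 random bytes, then the first 10 bytes of an HMAC-SHA1 keyed with key + client_id. The key and client id below are placeholders, not real configuration:

import hashlib
import hmac
import os
import struct
import time

key = b'example-shared-key'        # placeholder for server_info.key
client_id = os.urandom(32)         # placeholder for the session/client id

def pack_auth_data(key, client_id):
    utc_time = int(time.time()) & 0xFFFFFFFF
    data = struct.pack('>I', utc_time) + os.urandom(18)
    data += hmac.new(key + client_id, data, hashlib.sha1).digest()[:10]
    return data

def verify_auth_data(key, client_id, blob):
    expected = hmac.new(key + client_id, blob[:22], hashlib.sha1).digest()[:10]
    return expected == blob[22:32]

blob = pack_auth_data(key, client_id)
print("%d %s" % (len(blob), verify_auth_data(key, client_id, blob)))  # 32 True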
#
# XendBootloader.py - Framework to run a boot loader for picking the kernel
#
# Copyright 2005-2006 Red Hat, Inc.
# Jeremy Katz <[email protected]>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import os, select, errno, stat, signal, tty
import random
import shlex
from xen.xend import sxp
from xen.util import mkdir, oshelp
from XendLogging import log
from XendError import VmError
import pty, termios, fcntl
from xen.lowlevel import ptsname
def bootloader(blexec, disk, dom, quiet = False, blargs = '', kernel = '',
ramdisk = '', kernel_args = ''):
"""Run the boot loader executable on the given disk and return a
config image.
@param blexec Binary to use as the boot loader
@param disk Disk to run the boot loader on.
@param dom DomainInfo representing the domain being booted.
@param quiet Run in non-interactive mode, just booting the default.
@param blargs Arguments to pass to the bootloader."""
if not os.access(blexec, os.X_OK):
msg = "Bootloader isn't executable"
log.error(msg)
raise VmError(msg)
if not os.access(disk, os.R_OK):
msg = "Disk isn't accessible"
log.error(msg)
raise VmError(msg)
if os.uname()[0] == "NetBSD" and disk.startswith('/dev/'):
disk = disk.replace("/dev/", "/dev/r")
mkdir.parents("/var/run/xend/boot/", stat.S_IRWXU)
while True:
fifo = "/var/run/xend/boot/xenbl.%s" %(random.randint(0, 32000),)
try:
os.mkfifo(fifo, 0600)
except OSError, e:
if (e.errno != errno.EEXIST):
raise
break
# We need to present the bootloader's tty as a pty slave that xenconsole
# can access. Since the bootloader itself needs a pty slave,
# we end up with a connection like this:
#
# xenconsole -- (slave pty1 master) <-> (master pty2 slave) -- bootloader
#
# where we copy characters between the two master fds, as well as
# listening on the bootloader's fifo for the results.
(m1, s1) = pty.openpty()
# On Solaris, the pty master side will get cranky if we try
# to write to it while there is no slave. To work around this,
# keep the slave descriptor open until we're done. Set it
# to raw terminal parameters, otherwise it will echo back
# characters, which will confuse the I/O loop below.
# Furthermore, a raw master pty device has no terminal
# semantics on Solaris, so don't try to set any attributes
# for it.
if os.uname()[0] != 'SunOS' and os.uname()[0] != 'NetBSD':
tty.setraw(m1)
os.close(s1)
else:
tty.setraw(s1)
fcntl.fcntl(m1, fcntl.F_SETFL, os.O_NDELAY)
slavename = ptsname.ptsname(m1)
dom.storeDom("console/tty", slavename)
# Release the domain lock here, because we definitely don't want
# a stuck bootloader to deny service to other xend clients.
from xen.xend import XendDomain
domains = XendDomain.instance()
domains.domains_lock.release()
(child, m2) = pty.fork()
if (not child):
args = [ blexec ]
if kernel:
args.append("--kernel=%s" % kernel)
if ramdisk:
args.append("--ramdisk=%s" % ramdisk)
if kernel_args:
args.append("--args=%s" % kernel_args)
if quiet:
args.append("-q")
args.append("--output=%s" % fifo)
if blargs:
args.extend(shlex.split(blargs))
args.append(disk)
try:
log.debug("Launching bootloader as %s." % str(args))
env = os.environ.copy()
env['TERM'] = 'vt100'
oshelp.close_fds()
os.execvpe(args[0], args, env)
except OSError, e:
print e
pass
os._exit(1)
# record that this domain is bootloading
dom.bootloader_pid = child
# On Solaris, the master pty side does not have terminal semantics,
# so don't try to set any attributes, as it will fail.
if os.uname()[0] != 'SunOS':
tty.setraw(m2);
fcntl.fcntl(m2, fcntl.F_SETFL, os.O_NDELAY);
while True:
try:
r = os.open(fifo, os.O_RDONLY)
except OSError, e:
if e.errno == errno.EINTR:
continue
break
fcntl.fcntl(r, fcntl.F_SETFL, os.O_NDELAY);
ret = ""
inbuf=""; outbuf="";
# filedescriptors:
# r - input from the bootloader (bootstring output)
# m1 - input/output from/to xenconsole
# m2 - input/output from/to pty that controls the bootloader
# The filedescriptors are NDELAY, so it's ok to try to read
# bigger chunks than may be available, to keep e.g. curses
# screen redraws in the bootloader efficient. m1 is the side that
# gets xenconsole input, which will be keystrokes, so a small number
# is sufficient. m2 is pygrub output, which will be curses screen
# updates, so a larger number (1024) is appropriate there.
#
# For writeable descriptors, only include them in the set for select
# if there is actual data to write, otherwise this would loop too fast,
# eating up CPU time.
while True:
wsel = []
if len(outbuf) != 0:
wsel = wsel + [m1]
if len(inbuf) != 0:
wsel = wsel + [m2]
sel = select.select([r, m1, m2], wsel, [])
try:
if m1 in sel[0]:
s = os.read(m1, 16)
inbuf += s
if m2 in sel[1]:
n = os.write(m2, inbuf)
inbuf = inbuf[n:]
except OSError, e:
if e.errno == errno.EIO:
pass
try:
if m2 in sel[0]:
s = os.read(m2, 1024)
outbuf += s
if m1 in sel[1]:
n = os.write(m1, outbuf)
outbuf = outbuf[n:]
except OSError, e:
if e.errno == errno.EIO:
pass
if r in sel[0]:
s = os.read(r, 128)
ret = ret + s
if len(s) == 0:
break
del inbuf
del outbuf
os.waitpid(child, 0)
os.close(r)
os.close(m2)
os.close(m1)
if os.uname()[0] == 'SunOS' or os.uname()[0] == 'NetBSD':
os.close(s1)
os.unlink(fifo)
# Re-acquire the lock to cover the changes we're about to make
# when we return to domain creation.
domains.domains_lock.acquire()
if dom.bootloader_pid is None:
msg = "Domain was died while the bootloader was running."
log.error(msg)
raise VmError, msg
dom.bootloader_pid = None
if len(ret) == 0:
msg = "Boot loader didn't return any data!"
log.error(msg)
raise VmError, msg
pin = sxp.Parser()
pin.input(ret)
pin.input_eof()
blcfg = pin.val
return blcfg
def bootloader_tidy(dom):
if hasattr(dom, "bootloader_pid") and dom.bootloader_pid is not None:
pid = dom.bootloader_pid
dom.bootloader_pid = None
os.kill(pid, signal.SIGKILL)
| sudkannan/xen-hv | tools/python/xen/xend/XendBootloader.py | Python | gpl-2.0 | 7,323 |
# dummy plugin of Adder for VOs which don't need DDM access
from .AdderPluginBase import AdderPluginBase
class AdderDummyPlugin(AdderPluginBase):
# constructor
def __init__(self, job, **params):
AdderPluginBase.__init__(self, job, params)
# main
def execute(self):
self.result.setSucceeded()
return
| PanDAWMS/panda-server | pandaserver/dataservice/AdderDummyPlugin.py | Python | apache-2.0 | 349 |
# encoding: utf-8
# Author: Zhang Huangbin <[email protected]>
import web
import settings
from libs import iredutils
from libs.languages import get_language_maps
from libs.pgsql import decorators, user as userlib, domain as domainlib, connUtils
session = web.config.get('_session')
class List:
@decorators.require_login
def GET(self, domain, cur_page=1):
self.domain = web.safestr(domain).split('/', 1)[0]
cur_page = int(cur_page)
if not iredutils.is_domain(self.domain):
raise web.seeother('/domains?msg=INVALID_DOMAIN_NAME')
if cur_page == 0:
cur_page = 1
userLib = userlib.User()
result = userLib.listAccounts(domain=self.domain, cur_page=cur_page,)
if result[0] is True:
(total, records) = (result[1], result[2])
return web.render(
'pgsql/user/list.html',
cur_domain=self.domain,
cur_page=cur_page,
total=total,
users=records,
msg=web.input().get('msg', None),
)
else:
raise web.seeother('/domains?msg=%s' % web.urlquote(result[1]))
@decorators.csrf_protected
@decorators.require_login
def POST(self, domain):
i = web.input(_unicode=False, mail=[])
self.domain = str(domain)
if not iredutils.is_domain(self.domain):
raise web.seeother('/domains?msg=INVALID_DOMAIN_NAME')
self.mails = [str(v)
for v in i.get('mail', [])
if iredutils.is_email(v)
and str(v).endswith('@' + self.domain)
]
action = i.get('action', '')
msg = i.get('msg', None)
userLib = userlib.User()
if action == 'delete':
result = userLib.delete(domain=self.domain, mails=self.mails,)
msg = 'DELETED'
elif action == 'disable':
result = userLib.enableOrDisableAccount(domain=self.domain, accounts=self.mails, active=False)
msg = 'DISABLED'
elif action == 'enable':
result = userLib.enableOrDisableAccount(domain=self.domain, accounts=self.mails, active=True)
msg = 'ENABLED'
else:
result = (False, 'INVALID_ACTION')
if result[0] is True:
raise web.seeother('/users/%s?msg=%s' % (self.domain, msg,))
else:
raise web.seeother('/users/%s?msg=%s' % (self.domain, web.urlquote(result[1]),))
class Profile:
@decorators.require_login
def GET(self, profile_type, mail):
i = web.input()
self.mail = str(mail).lower()
self.cur_domain = self.mail.split('@', 1)[-1]
self.profile_type = str(profile_type)
if self.mail.startswith('@') and iredutils.is_domain(self.cur_domain):
# Catchall account.
raise web.seeother('/profile/domain/catchall/%s' % self.cur_domain)
if not iredutils.is_email(self.mail):
raise web.seeother('/domains?msg=INVALID_USER')
if not iredutils.is_domain(self.cur_domain):
raise web.seeother('/domains?msg=INVALID_DOMAIN_NAME')
userLib = userlib.User()
qr = userLib.profile(domain=self.cur_domain, mail=self.mail)
if qr[0] is True:
self.profile = qr[1]
else:
raise web.seeother('/users/%s?msg=%s' % (self.cur_domain, web.urlquote(qr[1])))
return web.render(
'pgsql/user/profile.html',
cur_domain=self.cur_domain,
mail=self.mail,
profile_type=self.profile_type,
profile=self.profile,
languagemaps=get_language_maps(),
msg=i.get('msg'),
)
@decorators.csrf_protected
@decorators.require_login
def POST(self, profile_type, mail):
i = web.input(
enabledService=[],
#mailForwardingAddress=[],
shadowAddress=[],
telephoneNumber=[],
memberOfGroup=[],
oldMemberOfAlias=[],
memberOfAlias=[],
#whitelistSender=[],
#blacklistSender=[],
#whitelistRecipient=[],
#blacklistRecipient=[],
)
self.profile_type = web.safestr(profile_type)
self.mail = str(mail).lower()
userLib = userlib.User()
result = userLib.update(
profile_type=self.profile_type,
mail=self.mail,
data=i,
)
if result[0] is True:
raise web.seeother('/profile/user/%s/%s?msg=UPDATED' % (self.profile_type, self.mail))
else:
raise web.seeother('/profile/user/%s/%s?msg=%s' % (self.profile_type, self.mail, web.urlquote(result[1])))
class Create:
@decorators.require_login
def GET(self, domain=None,):
if domain is None:
self.cur_domain = None
else:
self.cur_domain = str(domain)
if not iredutils.is_domain(self.cur_domain):
raise web.seeother('/domains?msg=INVALID_DOMAIN_NAME')
i = web.input()
# Get all managed domains.
connutils = connUtils.Utils()
qr = connutils.getManagedDomains(admin=session.get('username'), domainNameOnly=True,)
if qr[0] is True:
allDomains = qr[1]
else:
            raise web.seeother('/domains?msg=%s' % web.urlquote(qr[1]))
# Set first domain as current domain.
if self.cur_domain is None:
if len(allDomains) > 0:
raise web.seeother('/create/user/%s' % str(allDomains[0]))
else:
raise web.seeother('/domains?msg=NO_DOMAIN_AVAILABLE')
# Get domain profile.
domainLib = domainlib.Domain()
resultOfProfile = domainLib.profile(domain=self.cur_domain)
if resultOfProfile[0] is True:
self.profile = resultOfProfile[1]
else:
raise web.seeother('/domains?msg=%s' % web.urlquote(resultOfProfile[1]))
return web.render(
'pgsql/user/create.html',
cur_domain=self.cur_domain,
allDomains=allDomains,
profile=self.profile,
min_passwd_length=settings.min_passwd_length,
max_passwd_length=settings.max_passwd_length,
msg=i.get('msg'),
)
@decorators.csrf_protected
@decorators.require_login
def POST(self, domain):
i = web.input()
# Get domain name, username, cn.
self.username = web.safestr(i.get('username', ''))
self.cur_domain = web.safestr(i.get('domainName', ''))
userLib = userlib.User()
result = userLib.add(domain=self.cur_domain, data=i)
if result[0] is True:
raise web.seeother('/profile/user/general/%s@%s?msg=CREATED' % (self.username, self.cur_domain))
else:
raise web.seeother('/create/user/%s?msg=%s' % (self.cur_domain, web.urlquote(result[1])))
| shyaken/cp.eaemcb | controllers/pgsql/user.py | Python | gpl-2.0 | 7,014 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the knowledge base."""
from __future__ import unicode_literals
import unittest
from plaso.containers import artifacts
from plaso.engine import knowledge_base
from tests import test_lib as shared_test_lib
class KnowledgeBaseTest(shared_test_lib.BaseTestCase):
"""Tests for the knowledge base."""
# pylint: disable=protected-access
_MACOS_PATHS = [
'/Users/dude/Library/Application Data/Google/Chrome/Default/Extensions',
('/Users/dude/Library/Application Data/Google/Chrome/Default/Extensions/'
'apdfllckaahabafndbhieahigkjlhalf'),
'/private/var/log/system.log',
'/Users/frank/Library/Application Data/Google/Chrome/Default',
'/Users/hans/Library/Application Data/Google/Chrome/Default',
('/Users/frank/Library/Application Data/Google/Chrome/Default/'
'Extensions/pjkljhegncpnkpknbcohdijeoejaedia'),
'/Users/frank/Library/Application Data/Google/Chrome/Default/Extensions']
_MACOS_USERS = [
{'name': 'root', 'path': '/var/root', 'sid': '0'},
{'name': 'frank', 'path': '/Users/frank', 'sid': '4052'},
{'name': 'hans', 'path': '/Users/hans', 'sid': '4352'},
{'name': 'dude', 'path': '/Users/dude', 'sid': '1123'}]
_WINDOWS_PATHS = [
'C:\\Users\\Dude\\SomeFolder\\Chrome\\Default\\Extensions',
('C:\\Users\\Dude\\SomeNoneStandardFolder\\Chrome\\Default\\Extensions\\'
'hmjkmjkepdijhoojdojkdfohbdgmmhki'),
('C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions\\'
'blpcfgokakmgnkcojhhkbfbldkacnbeo'),
'C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions',
('C:\\Users\\frank\\AppData\\Local\\Google\\Chrome\\Extensions\\'
'icppfcnhkcmnfdhfhphakoifcfokfdhg'),
'C:\\Windows\\System32',
'C:\\Stuff/with path separator\\Folder']
_WINDOWS_USERS = [
{'name': 'dude', 'path': 'C:\\Users\\dude', 'sid': 'S-1'},
{'name': 'frank', 'path': 'C:\\Users\\frank', 'sid': 'S-2'}]
def _SetUserAccounts(self, knowledge_base_object, users):
"""Sets the user accounts in the knowledge base.
Args:
knowledge_base_object (KnowledgeBase): knowledge base.
users (list[dict[str, str]]): users.
"""
for user in users:
identifier = user.get('sid', user.get('uid', None))
if not identifier:
continue
user_account = artifacts.UserAccountArtifact(
identifier=identifier, user_directory=user.get('path', None),
username=user.get('name', None))
knowledge_base_object.AddUserAccount(user_account)
def testCodepageProperty(self):
"""Tests the codepage property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertEqual(knowledge_base_object.codepage, 'cp1252')
def testHostnameProperty(self):
"""Tests the hostname property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertEqual(knowledge_base_object.hostname, '')
def testOperatingSystemProperty(self):
"""Tests the operating_system property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
operating_system = knowledge_base_object.GetValue('operating_system')
self.assertIsNone(operating_system)
knowledge_base_object.SetValue('operating_system', 'Windows')
operating_system = knowledge_base_object.GetValue('operating_system')
self.assertEqual(operating_system, 'Windows')
def testTimezoneProperty(self):
"""Tests the timezone property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertEqual(knowledge_base_object.timezone.zone, 'UTC')
def testUserAccountsProperty(self):
"""Tests the user accounts property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertEqual(len(knowledge_base_object.user_accounts), 0)
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
self.assertEqual(len(knowledge_base_object.user_accounts), 1)
def testYearProperty(self):
"""Tests the year property."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertEqual(knowledge_base_object.year, 0)
def testAddUserAccount(self):
"""Tests the AddUserAccount function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
with self.assertRaises(KeyError):
knowledge_base_object.AddUserAccount(user_account)
def testAddEnvironmentVariable(self):
"""Tests the AddEnvironmentVariable function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemRoot', value='C:\\Windows')
knowledge_base_object.AddEnvironmentVariable(environment_variable)
with self.assertRaises(KeyError):
knowledge_base_object.AddEnvironmentVariable(environment_variable)
def testGetEnvironmentVariable(self):
"""Tests the GetEnvironmentVariable functions."""
knowledge_base_object = knowledge_base.KnowledgeBase()
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemRoot', value='C:\\Windows')
knowledge_base_object.AddEnvironmentVariable(environment_variable)
test_environment_variable = knowledge_base_object.GetEnvironmentVariable(
'SystemRoot')
self.assertIsNotNone(test_environment_variable)
test_environment_variable = knowledge_base_object.GetEnvironmentVariable(
'sYsTeMrOoT')
self.assertIsNotNone(test_environment_variable)
test_environment_variable = knowledge_base_object.GetEnvironmentVariable(
'Bogus')
self.assertIsNone(test_environment_variable)
def testGetEnvironmentVariables(self):
"""Tests the GetEnvironmentVariables function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='SystemRoot', value='C:\\Windows')
knowledge_base_object.AddEnvironmentVariable(environment_variable)
environment_variable = artifacts.EnvironmentVariableArtifact(
case_sensitive=False, name='WinDir', value='C:\\Windows')
knowledge_base_object.AddEnvironmentVariable(environment_variable)
environment_variables = knowledge_base_object.GetEnvironmentVariables()
self.assertEqual(len(environment_variables), 2)
def testGetHostname(self):
"""Tests the GetHostname function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
hostname = knowledge_base_object.GetHostname()
self.assertEqual(hostname, '')
# TODO: add tests for GetMountPoint.
def testGetSourceConfigurationArtifacts(self):
"""Tests the GetSourceConfigurationArtifacts function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
hostname_artifact = artifacts.HostnameArtifact(name='myhost.mydomain')
knowledge_base_object.SetHostname(hostname_artifact)
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
source_configurations = (
knowledge_base_object.GetSourceConfigurationArtifacts())
self.assertEqual(len(source_configurations), 1)
self.assertIsNotNone(source_configurations[0])
system_configuration = source_configurations[0].system_configuration
self.assertIsNotNone(system_configuration)
self.assertIsNotNone(system_configuration.hostname)
self.assertEqual(system_configuration.hostname.name, 'myhost.mydomain')
def testGetSystemConfigurationArtifact(self):
"""Tests the _GetSystemConfigurationArtifact function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
hostname_artifact = artifacts.HostnameArtifact(name='myhost.mydomain')
knowledge_base_object.SetHostname(hostname_artifact)
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
system_configuration = (
knowledge_base_object._GetSystemConfigurationArtifact())
self.assertIsNotNone(system_configuration)
self.assertIsNotNone(system_configuration.hostname)
self.assertEqual(system_configuration.hostname.name, 'myhost.mydomain')
# TODO: add tests for GetTextPrepend.
def testGetUsernameByIdentifier(self):
"""Tests the GetUsernameByIdentifier function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
username = knowledge_base_object.GetUsernameByIdentifier('1000')
self.assertEqual(username, 'testuser')
username = knowledge_base_object.GetUsernameByIdentifier(1000)
self.assertEqual(username, '')
username = knowledge_base_object.GetUsernameByIdentifier('1001')
self.assertEqual(username, '')
def testGetUsernameForPath(self):
"""Tests the GetUsernameForPath function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self._SetUserAccounts(knowledge_base_object, self._MACOS_USERS)
username = knowledge_base_object.GetUsernameForPath(
self._MACOS_PATHS[0])
self.assertEqual(username, 'dude')
username = knowledge_base_object.GetUsernameForPath(
self._MACOS_PATHS[4])
self.assertEqual(username, 'hans')
username = knowledge_base_object.GetUsernameForPath(
self._WINDOWS_PATHS[0])
self.assertIsNone(username)
knowledge_base_object = knowledge_base.KnowledgeBase()
self._SetUserAccounts(knowledge_base_object, self._WINDOWS_USERS)
username = knowledge_base_object.GetUsernameForPath(
self._WINDOWS_PATHS[0])
self.assertEqual(username, 'dude')
username = knowledge_base_object.GetUsernameForPath(
self._WINDOWS_PATHS[2])
self.assertEqual(username, 'frank')
username = knowledge_base_object.GetUsernameForPath(
self._MACOS_PATHS[2])
self.assertIsNone(username)
def testGetSetValue(self):
"""Tests the Get and SetValue functions."""
knowledge_base_object = knowledge_base.KnowledgeBase()
expected_value = 'test value'
knowledge_base_object.SetValue('Test', expected_value)
value = knowledge_base_object.GetValue('Test')
self.assertEqual(value, expected_value)
value = knowledge_base_object.GetValue('tEsT')
self.assertEqual(value, expected_value)
value = knowledge_base_object.GetValue('Bogus')
self.assertIsNone(value)
def testHasUserAccounts(self):
"""Tests the HasUserAccounts function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
self.assertFalse(knowledge_base_object.HasUserAccounts())
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
knowledge_base_object.AddUserAccount(user_account)
self.assertTrue(knowledge_base_object.HasUserAccounts())
def testReadSystemConfigurationArtifact(self):
"""Tests the ReadSystemConfigurationArtifact function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
system_configuration = artifacts.SystemConfigurationArtifact()
system_configuration.hostname = artifacts.HostnameArtifact(
name='myhost.mydomain')
user_account = artifacts.UserAccountArtifact(
identifier='1000', user_directory='/home/testuser',
username='testuser')
system_configuration.user_accounts.append(user_account)
knowledge_base_object.ReadSystemConfigurationArtifact(system_configuration)
hostname = knowledge_base_object.GetHostname()
self.assertEqual(hostname, 'myhost.mydomain')
def testSetActiveSession(self):
"""Tests the SetActiveSession function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
knowledge_base_object.SetActiveSession('ddda05bedf324cbd99fa8c24b8a0037a')
self.assertEqual(
knowledge_base_object._active_session,
'ddda05bedf324cbd99fa8c24b8a0037a')
knowledge_base_object.SetActiveSession(
knowledge_base_object._DEFAULT_ACTIVE_SESSION)
self.assertEqual(
knowledge_base_object._active_session,
knowledge_base_object._DEFAULT_ACTIVE_SESSION)
def testSetCodepage(self):
"""Tests the SetCodepage function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
knowledge_base_object.SetCodepage('cp1252')
with self.assertRaises(ValueError):
knowledge_base_object.SetCodepage('bogus')
def testSetHostname(self):
"""Tests the SetHostname function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
hostname_artifact = artifacts.HostnameArtifact(name='myhost.mydomain')
knowledge_base_object.SetHostname(hostname_artifact)
# TODO: add tests for SetMountPoint.
# TODO: add tests for SetTextPrepend.
def testSetTimeZone(self):
"""Tests the SetTimeZone function."""
knowledge_base_object = knowledge_base.KnowledgeBase()
knowledge_base_object.SetTimeZone('Europe/Zurich')
with self.assertRaises(ValueError):
knowledge_base_object.SetTimeZone('Bogus')
if __name__ == '__main__':
unittest.main()
| rgayon/plaso | tests/engine/knowledge_base.py | Python | apache-2.0 | 13,664 |
## ftpoPy ##
#
# This program allows a user to take control of a remote computer using email.
# The following code is the server part of the application. The client part is
# not part of this project. Any email client can be used.
# Copyright (C) 2008,2009 Philippe Chretien
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License Version 2
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# You will find the latest version of this code at the following address:
# http://github.com/pchretien
#
# You can contact me at the following email address:
# [email protected]
import os
import poplib
import smtplib
import email
import time
from email import encoders
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from ft_mail import *
from ft_cmd import ICommand
from ft_cmd_factory import CommandFactory
class MailProcessor:
__mailUtil = None
__pollingPeriod = 15
__users = []
__currentUser = ""
__factory = None
def __init__(self, argv):
# Display usage if not started with the proper arguments
if len(argv) < 5:
print "usage: ft_main.py pop_server pop_username pop_password smtp_server [polling_period]"
quit()
# The polling period in seconds
if len(argv) > 5:
self.__pollingPeriod = int(argv[5])
# Create an instance of the email utility.
self.__mailUtil = MailUtil(argv[1], argv[2], argv[3], argv[4])
self.loadUsers()
self.__factory = CommandFactory(self)
def loadUsers(self):
print "Reading onfigurations ..."
self.__users = list()
file = open("./ft_users.config")
lines = file.read().splitlines()
for line in lines:
tokens = line.split('=')
if len(tokens[0]) > 0:
self.__users.append([tokens[0], tokens[1]])
file.close()
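# Illustrative format of ./ft_users.config as implied by loadUsers()/saveUsers()
# (example values only, not taken from the original project):
#
# [email protected]=secret1
# [email protected]=secret2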
def saveUsers(self):
print "Writing onfigurations ..."
file = open("./ft_users.config", "w")
for u, p in self.__users:
file.write("%s=%s\n" % (u,p))
file.close()
def setPollingPeriod(self, period):
print "Changing polling period to %d" % (int(period))
self.__pollingPeriod = int(period)
def changePassword(self, newPassword, newPasswordAgain):
print "Changing password for user %s" % (self.__currentUser)
for u, p in self.__users:
u2 = "<"+u.strip().lower()+">"
if self.__currentUser.strip().lower().find(u2) > -1:
self.__users.remove([u,p])
self.__users.append([u,newPassword])
return True
return False
def checkPassword(self, password):
for u, p in self.__users:
u = "<"+u.strip().lower()+">"
if self.__currentUser.strip().lower().find(u) > -1 and password.strip() == p.strip():
return True
return False
def run(self):
print "Ready."
while True:
msg = self.__mailUtil.getTopMessage()
if msg is None:
time.sleep(self.__pollingPeriod)
continue
response = self.processMessage(msg)
if response != None:
self.__mailUtil.sendMessage(response)
# self.__mailUtil.delTopMessage()
quit()
print "Ready."
def processMessage(self, inMsg):
try:
startTime = time.time()
print "processing message from: " + inMsg['From']
payload = inMsg.get_payload()
while isinstance(payload,list):
payload = payload[0].get_payload()
reply = MIMEMultipart()
allAttachments = []
allResponses = "FTPOPY reply to the following commands:\n\n%s" % (payload)
# Check password
self.__currentUser = inMsg['From']
if self.checkPassword(inMsg['Subject']):
# This piece of code splits commands
currentCommand = ""
commandLines = []
for line in payload.split('\n'):
if line.find("/") == 0:
currentCommand = currentCommand.replace("\n", "")
currentCommand = currentCommand.replace("\r", "")
currentCommand = currentCommand.replace("=20", "")
currentCommand = currentCommand.replace("=", "")
currentCommand = currentCommand.strip()
commandLines.append(currentCommand)
currentCommand = ""
else:
currentCommand = currentCommand + line
for line in commandLines:
if len(line) == 0:
continue
# The the command objects
command = self.__factory.getCommand(line, inMsg)
command.execute()
response = command.response()
if isinstance(response, str) or isinstance(response, basestring):
allResponses = allResponses + "\n\n->" + line + "\n" + response
else:
allAttachments.append(response)
else:
allResponses += "\n\nAccess denied.\n"
allResponses += "Invalid email or password\n"
except:
print "processMsg() failed!"
raise
return None
allResponses += "\n\n"
allResponses += "Available commands are: \n? (help), \nGET (download a file), \nPUT (upload a file), \nFTPOP (ftpoPY server management) \n... and all commands supported by the remote shell.\n"
allResponses += 'All commands must be separated by an empty line starting with /\n'
allResponses += "\n"
allResponses += "Visit the project website at http://www.basbrun.com/?ftpopy\n"
#reply['Subject'] = "RE:" + inMsg["Subject"]
reply['Subject'] = "RE: Executed on " + time.asctime()+ " in %8.2f seconds" % (time.time()-startTime)
reply['From'] = inMsg["To"]
reply['To'] = inMsg["From"]
reply.attach(MIMEText(allResponses))
for attachment in allAttachments:
reply.attach(attachment)
return reply
| pchretien/ftpopy | python/ft_processor.py | Python | gpl-2.0 | 7,260 |
from gmrf import Covariance
from gmrf import GaussianField
import Tasmanian
import numpy as np
import matplotlib.pyplot as plt
import unittest
from scipy.stats import norm
class TestGaussHermite(unittest.TestCase):
def test_standard_normal(self):
"""
Test moments of a standard normal density
"""
# Initialize sparse grid
dim = 1
level = 3
moments = [1,0,1,0,3]
# Define Gauss-Hermite physicist's rule exp(-x**2)
grid = Tasmanian.makeGlobalGrid(dim, 1, level, "level", "gauss-hermite")
#
# Explicit
#
for i in range(len(moments)):
z = grid.getPoints() # quadrature nodes
w = grid.getQuadratureWeights() # quadrature weights
y = np.sqrt(2)*z # transform to N(0,1)
c_norm = np.sqrt(np.pi)**dim # normalization constant
mom_a = np.sum(w*(y[:,0]**i))/c_norm
mom_e = moments[i]
self.assertAlmostEqual(mom_a, mom_e)
#
# Using integrate
#
for i in range(len(moments)):
z = grid.getPoints() # quadrature nodes
y = np.sqrt(2)*z # transform to N(0,1)
c_norm = np.sqrt(np.pi)**dim # normalization constant
grid.loadNeededPoints(y**i)
mom_a = grid.integrate()/c_norm
mom_e = moments[i]
self.assertAlmostEqual(mom_a[0], mom_e)
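# Note added for exposition: the two loops above rely on the standard change of
# variables for the physicists' Gauss-Hermite rule, which integrates against
# exp(-x**2). For Y ~ N(0, 1),
#   E[Y**k] = (2*pi)**(-1/2) * integral of y**k * exp(-y**2/2) dy,
# and substituting y = sqrt(2)*z (dy = sqrt(2) dz) gives
#   E[Y**k] = pi**(-1/2) * integral of (sqrt(2)*z)**k * exp(-z**2) dz
#           ~ sum_i w_i * (sqrt(2)*z_i)**k / sqrt(pi),
# which is exactly the y = sqrt(2)*z transform and the c_norm = sqrt(pi)**dim
# normalization used above.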
def test_gaussian_random_field(self):
"""
Reproduce statistics of Gaussian random field
"""
#
# Define Gaussian Field with degenerate support
#
oort = 1/np.sqrt(2)
V = np.array([[0.5, oort, 0, 0.5],
[0.5, 0, -oort, -0.5],
[0.5, -oort, 0, 0.5],
[0.5, 0, oort, -0.5]])
# Eigenvalues
d = np.array([4,3,2,1], dtype=float)
Lmd = np.diag(d)
# Covariance matrix
K = V.dot(Lmd.dot(V.T))
mu = np.array([1,2,3,4])[:,None]
# Zero mean Gaussian field
dim = 4
eta = GaussianField(dim, mean=mu, K=K, mode='covariance')
n_vars = eta.covariance().size()
level = 1
# Define Gauss-Hermite physicist's rule exp(-x**2)
grid = Tasmanian.makeGlobalGrid(n_vars, 4, level, "level", "gauss-hermite-odd")
# Evaluate the Gaussian random field at the Gauss points
z = grid.getPoints()
y = np.sqrt(2)*z
const_norm = np.sqrt(np.pi)**n_vars
# Evaluate the random field at the Gauss points
w = grid.getQuadratureWeights()
etay = eta.sample(z=y.T)
n = grid.getNumPoints()
I = np.zeros(4)
II = np.zeros((4,4))
for i in range(n):
II += w[i]*np.outer(etay[:,i]-mu.ravel(),etay[:,i]-mu.ravel())
I += w[i]*etay[:,i]
I /= const_norm
II /= const_norm
self.assertTrue(np.allclose(II,K))
self.assertTrue(np.allclose(I,mu.ravel()))
def test_interpolant(self):
dim = 1
level = 3
grid = Tasmanian.makeGlobalGrid(dim,1,level,'level','gauss-hermite')
#f = lambda x: np.exp(-np.abs(x))
f = lambda x: np.sum(x**3,axis=1)[:,None]
# Evaluate function at abscissae
z = grid.getPoints()
fz = f(z)
# Store points in grid
grid.loadNeededPoints(fz)
# Evaluate on a finer grid
x = np.linspace(-1,1,100)[:,None]
y = grid.evaluateBatch(x)
# Check accuracy
self.assertTrue(np.allclose(y,f(x)))
def test_surrogate(self):
#
# Use sparse grid interpolant to sample
#
dim = 1
level = 3
grid = Tasmanian.makeGlobalGrid(dim,1,level,'level','gauss-hermite')
# Convert from physicist's to probabilist's variable
z = np.sqrt(2)*grid.getPoints()
# Evaluate function at given points and store
f = lambda x: x**2
fz = f(z)
grid.loadNeededPoints(fz)
# Generate random sample of standard normal variables
x = np.random.normal(size=(10000,1))
# Convert to physicist's domain and evaluate batch
x2 = grid.evaluateBatch(x/np.sqrt(2))
self.assertTrue(np.allclose(x2,x**2))
I = grid.integrate()/np.sqrt(np.pi)
self.assertAlmostEqual(I[0],1)
def test_transform(self):
"""
Approximate moments of a Gaussian random vector
X ~ N([3,4], [[2,1],[1,3]])
by a sparse grid method based on the interval [-1,1]^2
"""
#
# Define Sparse Grid on [-1,1]^2
#
dim = 2
level = 40
grid = Tasmanian.makeGlobalGrid(dim,1,level,'level','gauss-legendre')
n_points = grid.getNumPoints()
y = grid.getPoints()
#
# Transform Points to Z~N(0,I)
#
z = norm.ppf(0.5*y+0.5)
dz = 0.5**dim
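# Note added for exposition: mapping the Gauss-Legendre nodes y in [-1, 1] to
# u = 0.5*y + 0.5 in [0, 1] and then to z = norm.ppf(u) turns an expectation
# under N(0, I) into an integral over [-1, 1]^dim; the factor dz = 0.5**dim is
# the Jacobian of u = 0.5*y + 0.5 applied once per dimension, and it multiplies
# the quadrature weights below.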
#
# Define Gaussian Field
#
K = np.array([[2,1],[1,3]])
m = np.array([3,4])
# Eigendecomposition
lmd, V = np.linalg.eigh(K)
lmd = lmd[::-1]
V = V[:,::-1]
sqrtD = np.diag(np.sqrt(lmd))
X = V.dot(sqrtD.dot(z.T))
Y = X + np.tile(m[:,None],(1,n_points))
#
# Recompute mean and covariance matrix
#
w = grid.getQuadratureWeights()*dz
ma = np.zeros(2)
Ka = 0
for i in range(n_points):
ma += Y[:,i]*w[i]
Ka += X[1,i]*X[0,i]*w[i]
| hvanwyk/quadmesh | tests/test_gmrf/test_sparse_grid.py | Python | mit | 5,953 |
from datetime import date
from decimal import Decimal
from webhelpers.pylonslib.secure_form import token_key
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
class TestTransferController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='transfer',
action='index'))
# Test response...
response.mustcontain('Add a New Transfer')
form = response.form
user_rich = meta.Session.query(model.User).\
filter_by(name=u'Rich Scheme').one()
user_ben = meta.Session.query(model.User).\
filter_by(name=u'Ben Bitdiddle').one()
form['debtor_id'] = user_rich.id
form['creditor_id'] = user_ben.id
form['amount'] = '123.45'
# Make sure date is today.
today = date.today()
assert form['date'].value == today.strftime('%m/%d/%Y')
form['description'] = 'A test transfer from Rich to Ben'
response = form.submit()
response = response.follow()
response.mustcontain('Transfer', 'created.')
t = meta.Session.query(model.Transfer).\
order_by(model.Transfer.id.desc()).first()
assert t.debtor.name == u'Rich Scheme'
assert t.creditor.name == u'Ben Bitdiddle'
assert t.amount == 12345
assert t.date == today
assert t.description == u'A test transfer from Rich to Ben'
def test_edit_and_delete(self):
user_rich = meta.Session.query(model.User).\
filter_by(name=u'Rich Scheme').one()
user_ben = meta.Session.query(model.User).\
filter_by(name=u'Ben Bitdiddle').one()
t = model.Transfer(user_rich, user_ben, 12345)
t.description = u'Test transfer'
meta.Session.add(t)
meta.Session.commit()
response = self.app.get(url_for(controller='transfer',
action='edit',
id=t.id))
response.mustcontain('Edit a Transfer')
form = response.form
assert int(form['debtor_id'].value) == t.debtor_id
assert int(form['creditor_id'].value) == t.creditor_id
assert Decimal(form['amount'].value) * 100 == t.amount
assert form['date'].value == t.date.strftime('%m/%d/%Y')
assert form['description'].value == t.description
form['description'] = u'A new description'
response = form.submit()
response = response.follow()
response.mustcontain('Transfer', 'updated.')
t = meta.Session.query(model.Transfer).\
order_by(model.Transfer.id.desc()).first()
assert t.description == u'A new description'
response = self.app.get(url_for(controller='transfer',
action='delete',
id=t.id))
response = response.form.submit('delete').follow()
response.mustcontain('Transfer', 'deleted')
def test_edit_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit',
id=21424), status=404)
def test_update_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='transfer',
action='update',
id=21424),
params=params,
status=404)
def test_xsrf_protection(self):
self.app.post(url_for(controller='transfer',
action='update'),
params=self.sample_params,
status=403)
def test_update_get_redirects(self):
response = self.app.get(url_for(controller='transfer',
action='update'),
status=302)
assert (dict(response.headers)['Location'] ==
url_for(controller='transfer', action='edit', qualified=True))
def test_delete_nonexistent(self):
self.app.get(url_for(controller='transfer',
action='delete',
id=124244),
status=404)
def test_destroy_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='transfer',
action='destroy',
id=124344),
params=params,
status=404)
def test_delete_xsrf_protection(self):
self.app.post(url_for(controller='transfer',
action='destroy',
id=1),
params={'delete': 'Delete'},
status=403)
def setUp(self):
self.sample_params = {
'debtor_id': '1',
'creditor_id': '2',
'amount': '33.98',
'date': '4/1/2007',
'description': 'Example transfer params.'}
def tearDown(self):
transfers = meta.Session.query(model.Transfer).all()
for t in transfers:
meta.Session.delete(t)
meta.Session.commit()
| ebroder/bluechips | bluechips/tests/functional/test_transfer.py | Python | gpl-2.0 | 5,692 |
SCREEN_SIZE = [640, 480]
SHIP_NORMAL_POINTS = [(0,25), (-13,-15), (-9, -6), (9, -6), (13,-15)]
SHIP_ACCEL_POINTS = [(0,25), (-13,-15), (-9, -6), (9, -6), (0, -17), (-9, -6), (9, -6), (13,-15)]
ASTEROID1_POINTS = [(-8, -30), (-30, -9), (-15, 2), (-30, 4), (-15, 30), (0, 10), (0, 30), (16, 30), (30, 4), (30, -10), (16, -30)]
ASTEROID2_POINTS = [(-13, -30), (-30, -16), (-21, -3), (-30, 11), (-13, 30), (-4, 16), (12, 26), (30, 6), (14, -8), (27, -15), (14, -30), (0, -21)]
ASTEROID3_POINTS = [(-14, -30), (-30, -16), (-30, 14), (-13, 30), (10, 30), (30, 10), (22, -2), (30, -15), (16, -30), (0, -13)]
UFO_POINTS = [(-7, -20), (-12, -7), (-30, 5), (-11, 19), (11, 19), (30, 5), (12, -7), (7, -20), (-7, -20), (-12, -7), (12, -7), (30, 5), (-30, 5)]
SHOT_POINTS = [(0, 0), (0, 10)]
| italomaia/turtle-linux | games/Vectorpods2/src/Constants.py | Python | gpl-3.0 | 787 |
from src.business.schedulers.qthreadClock import QThreadClock
from src.utils.Singleton import Singleton
class SchedClock(metaclass=Singleton):
def __init__(self, lcd_display):
self.lcd = lcd_display
self.threadClock = QThreadClock()
self.threadClock.time_signal.connect(self.refresh)
def start_scheduler(self):
self.threadClock.start()
# Refreshing Clock
def refresh(self, value):
self.lcd.setText(value)
| pliniopereira/ccd3 | src/business/schedulers/schedClock.py | Python | gpl-3.0 | 465 |
#!/usr/bin/env python2
import os
import re
import sys
import json
import logging
import argparse
import urlparse
import robotparser
import ConfigParser
import requests
import amqplib.client_0_8 as amqp
parser = argparse.ArgumentParser()
parser.add_argument('-v', dest='verbose', action='store_true', default=False, help="Verbose operation")
parser.add_argument('--debug', action='store_true', default=False, help="very verbose operation")
parser.add_argument('-c', dest='config', default='config.ini', help="Path to config file")
parser.add_argument('--receive', action='store_true', default=False, help="Receive status updates from queue")
args = parser.parse_args()
logging.basicConfig(
level=logging.DEBUG if args.debug else logging.INFO if args.verbose else logging.WARN,
format="%(asctime)s\t%(levelname)s\t%(message)s",
datefmt="[%Y-%m-%d %H:%M:%S]",
)
"""
This daemon listens on a dedicated queue for URLs to check. For each
received URL, the program attempts to fetch the robots.txt file on the
target domain. If robots.txt indicates that a resource is not available
to spiders, the URL is dropped and the status is written back to the DB.
If robots.txt permits spidering of the target URL, the message is forwarded
to the regular per-isp queues.
The content of the robots.txt file for each domain is cached for <n> days
(configurable)
This script was written in python to take advantage of the standard library's
robots.txt parser.
"""
class BlockedRobotsTxtChecker(object):
def __init__(self, config, conn, ch):
self.config = config
self.conn = conn
self.ch = ch
self.headers = {'User-agent': self.config.get('daemon','useragent')}
self.load_exclude()
def load_exclude(self):
self.exclude = []
try:
for (name,value) in self.config.items('robots_override'):
# name is always exclude
self.exclude.append( re.compile(value) )
except ConfigParser.NoSectionError:
pass
print self.exclude
def is_excluded(self, url):
return any([ exc.match(url) for exc in self.exclude ])
def get_robots_url(self, url):
"""Split URL, add /robots.txt resource"""
parts = urlparse.urlparse(url)
return urlparse.urlunparse( parts[:2] + ('/robots.txt','','','') )
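# Illustrative example (added for exposition; not in the original source):
# get_robots_url('http://example.com/some/page?x=1')
# returns 'http://example.com/robots.txt' -- only the scheme and host are kept.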
def set_url_status(self, url, status):
if self.config.has_option('daemon', 'status-queue'):
self.set_url_status_amqp(url, status)
else:
self.set_url_status_db(url, status)
def set_url_status_db(self, url, status):
c = self.conn.cursor()
c.execute("""update urls set status = %s where url = %s""", [ status, url])
c.close()
self.conn.commit()
def set_url_status_amqp(self, url, status):
logging.debug("Sending status message")
msg = {'url': url, 'status': status}
msgsend = amqp.Message(json.dumps(msg))
self.ch.basic_publish(msgsend, self.config.get('daemon', 'exchange'), 'status')
def receive_status(self, msg):
data = json.loads(msg.body)
self.ch.basic_ack(msg.delivery_tag)
self.set_url_status_db(data['url'], data['status'])
logging.info("Set status: %s, url: %s", data['status'], data['url'])
return True
def check_robots(self,msg):
data = json.loads(msg.body)
self.ch.basic_ack(msg.delivery_tag)
# get the robots.txt URL
url = self.get_robots_url(data['url'])
logging.info("Using robots url: %s", url)
try:
# fetch robots.txt
robots_txt = requests.get(url, headers=self.headers, timeout=5)
# pass the content to the robots.txt parser
rbp = robotparser.RobotFileParser()
rbp.parse(robots_txt.text.splitlines())
# check to see if we're allowed in - test using OrgProbe's useragent
if self.is_excluded(data['url']):
logging.info("Overridden: %s", data['url'])
elif not rbp.can_fetch(self.config.get('daemon','probe_useragent'), data['url']):
logging.warn("Disallowed: %s", data['url'])
# write rejection to DB
self.set_url_status(data['url'], 'disallowed-by-robots-txt')
return True
else:
# we're allowed in.
logging.info("Allowed: %s", data['url'])
except Exception,v:
# if anything bad happens, log it but continue
logging.error("Exception: %s", v)
# now do a head request for size and mime type
try:
req = requests.head(data['url'], headers=self.headers, timeout=5)
logging.info("Got mime: %s", req.headers['content-type'])
if not req.headers['content-type'].startswith('text/'):
logging.warn("Disallowed MIME: %s", req.headers['content-type'])
self.set_url_status(data['url'], 'disallowed-mime-type')
return True
logging.info("Got length: %s", req.headers.get('content-length',0))
if int(req.headers.get('content-length',0)) > 262144: # yahoo homepage is 216k!
#TODO: should we test content of GET request when content-length is not available?
logging.warn("Content too large: %s", req.headers['content-length'])
self.set_url_status(data['url'], 'disallowed-content-length')
return True
except Exception,v:
# if anything bad happens, log it but continue
logging.error("HEAD Exception: %s", v)
# pass the message to the regular location
msgsend = amqp.Message(msg.body)
new_key = msg.routing_key.replace(self.config.get('daemon','queue'),'url')
self.ch.basic_publish(msgsend, self.config.get('daemon','exchange'), new_key)
logging.info("Message sent with new key: %s", new_key)
return True
def main():
# set up cache for robots.txt content
cfg = ConfigParser.ConfigParser()
assert(len(cfg.read([args.config])) == 1)
# create MySQL connection
print cfg.has_option('daemon', 'status-queue'), args.receive
if cfg.has_option('daemon', 'status-queue') and not args.receive:
conn = None
else:
import psycopg2
pgopts = dict(cfg.items('db'))
conn = psycopg2.connect(**pgopts)
# Create AMQP connection
amqpopts = dict(cfg.items('amqp'))
amqpconn = amqp.Connection( **amqpopts)
ch = amqpconn.channel()
ch.basic_qos(0, 10, False)
checker = BlockedRobotsTxtChecker(cfg, conn, ch)
# create consumer, enter mainloop
if args.receive:
ch.queue_declare(cfg.get('daemon', 'status-queue'), durable=True, auto_delete=False)
ch.queue_bind( cfg.get('daemon', 'status-queue'), cfg.get('daemon','exchange'), "status")
ch.basic_consume(cfg.get('daemon', 'status-queue'), consumer_tag='receiver1', callback=checker.receive_status)
else:
ch.basic_consume(cfg.get('daemon','queue'), consumer_tag='checker1', callback=checker.check_robots)
while True:
ch.wait()
if __name__ == '__main__':
main()
| openrightsgroup/Blocking-Middleware | backend/queue-services/checker.py | Python | gpl-3.0 | 7,243 |
"""Support for NuHeat thermostats."""
import asyncio
from datetime import timedelta
import logging
import nuheat
import requests
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_DEVICES,
CONF_PASSWORD,
CONF_USERNAME,
HTTP_BAD_REQUEST,
HTTP_INTERNAL_SERVER_ERROR,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import CONF_SERIAL_NUMBER, DOMAIN, PLATFORMS
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_DEVICES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
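# Illustrative configuration.yaml snippet matching CONFIG_SCHEMA above
# (placeholder values, not from the original source):
#
# nuheat:
#   username: YOUR_USERNAME
#   password: YOUR_PASSWORD
#   devices:
#     - "12345"  # thermostat serial number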
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the NuHeat component."""
hass.data.setdefault(DOMAIN, {})
conf = config.get(DOMAIN)
if not conf:
return True
for serial_number in conf[CONF_DEVICES]:
# Since the api currently doesn't permit fetching the serial numbers
# and they have to be specified we create a separate config entry for
# each serial number. This won't increase the number of http
# requests as each thermostat has to be updated anyways.
# This also allows us to validate that the entered serial numbers are
# valid and avoid ending up with a config entry where only half of the
# devices work.
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: conf[CONF_USERNAME],
CONF_PASSWORD: conf[CONF_PASSWORD],
CONF_SERIAL_NUMBER: serial_number,
},
)
)
return True
def _get_thermostat(api, serial_number):
"""Authenticate and create the thermostat object."""
api.authenticate()
return api.get_thermostat(serial_number)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up NuHeat from a config entry."""
conf = entry.data
username = conf[CONF_USERNAME]
password = conf[CONF_PASSWORD]
serial_number = conf[CONF_SERIAL_NUMBER]
api = nuheat.NuHeat(username, password)
try:
thermostat = await hass.async_add_executor_job(
_get_thermostat, api, serial_number
)
except requests.exceptions.Timeout as ex:
raise ConfigEntryNotReady from ex
except requests.exceptions.HTTPError as ex:
if (
ex.response.status_code > HTTP_BAD_REQUEST
and ex.response.status_code < HTTP_INTERNAL_SERVER_ERROR
):
_LOGGER.error("Failed to login to nuheat: %s", ex)
return False
raise ConfigEntryNotReady from ex
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error("Failed to login to nuheat: %s", ex)
return False
async def _async_update_data():
"""Fetch data from API endpoint."""
await hass.async_add_executor_job(thermostat.get_data)
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=f"nuheat {serial_number}",
update_method=_async_update_data,
update_interval=timedelta(minutes=5),
)
hass.data[DOMAIN][entry.entry_id] = (thermostat, coordinator)
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(entry, component)
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
| tboyce021/home-assistant | homeassistant/components/nuheat/__init__.py | Python | apache-2.0 | 4,342 |
# Author: Nguyen Truong Duy
# Email: [email protected]
#
# Note: All the functions in this module are related to the following papers
#
# 1) "Deterministic Polynomial-Time Algorithms for Designing
# Short DNA Words" by Kao et al.
#
# You can retrieve a copy of this paper at:
# http://arxiv.org/pdf/1201.6358.pdf
#
# 2) (For free-energy constraint C9) "Randomized Fast Design of Short DNA
# Words" by Kao et al.
#
# You can retrieve a copy of this paper at:
# http://dl.acm.org/citation.cfm?id=1597047
#
"""
This module contains implementations of various algorithms to generate sets of short DNA words satisfying the following constraints:
1) C1 and C4
2) C1 to C6
3) C1 to C7
4) C1, C2, C3, C7 and C8
5) C1 to C8
6) C1 to C6, and C9
"""
# Self-written modules
import helper
import algo_subroutine
import free_energy_routine
import m_fraction
# Builtin modules
import math
##########################################################################################
# genbinword14sub(n, l, k1, k4, pascalprefixsum):
#
# Assumption: (n, l) must satisfy the condition of Lemma 3 in paper (1), i.e.
# ExpCount(M, k1, k4) > nC2 * (1 + 2 * (k4 - 1)) - 1
# so that such a matrix M can exist. Otherwise, the function will
# throw a Runtime Error
#
# The implementation strictly follows Algorithm 1 presented in paper (1)
#
# Detail:
def genbinword14sub(n, l, k1, k4, pascalprefixsum):
"""
Compute and return a BINARY (k1, k4)-distance matrix M (consisting of characters '0' and '1') of dimension n x l
Inputs:
+ n: the number of DNA strings to generate
+ l: the length of each DNA string to generate
+ k1: parameter of C1 constraint
+ k4: parameter of C4 constraint
+ pascalprefixsum: a 2D array with at least l + 1 rows. Row i has (i + 1) entries. pascalprefixsum[i][j] = sum ( i Choose h ) for h = 0, ..., j.
Output:
+ M: a BINARY (k1, k4)-distance matrix M of dimension n x l
"""
# 'X' stands for unknown
M = [['X'] * l for i in xrange(n)]
numdiff1 = [[0] * (i + 1) for i in xrange(n)]
numdiff4 = [[[0] * n for i in xrange(n)] for k in xrange(k4 - 1)]
prevexpcount = algo_subroutine.compute_expcount_empty(n, l, k1, k4, pascalprefixsum)
guardval = m_fraction.MyFraction(((n * (n - 1)) / 2) * (1 + 2 * (k4 - 1)) - 1)
for strpos in xrange(l):
for strid in xrange(n):
if prevexpcount <= guardval:
raise RuntimeError("Your Algo or Input (n, l) is wrong")
expcount0 = prevexpcount + algo_subroutine.compute_change_in_expcount(M, strpos, strid, '0', 'X', numdiff1, numdiff4, n, l, k1, k4, pascalprefixsum)
expcount1 = m_fraction.MyFraction(2) * prevexpcount - expcount0
if expcount0 >= expcount1:
M[strid][strpos] = '0'
prevexpcount = expcount0
else:
M[strid][strpos] = '1'
prevexpcount = expcount1
algo_subroutine.update_numdiff(M, strpos, strid, 'X', numdiff1, numdiff4, n, l, k4)
return M
##################################################################################
# genbinword14(n, k1, k4):
#
# Algorithm flow:
# + Precompute pascalprefixsum which is
# - a 2D array with at least l + 1 rows. Row i has (i + 1) entries
# - pascalprefixsum[i][j] = sum ( i Choose h ) for h = 0, ..., j
# This is to save computation time in other subroutines.
# + Find the minimum length l (via binary search) that satisfies Lemma 3
# of paper (1)
# + Call the routine genbinword14sub(n, l, k1, k4, pascalprefixsum)
# (implemeted above)
#
# Time complexity: O(n^2 * (max(k1, k4) + log n)^2)
def genbinword14(n, k1, k4):
"""
Compute and return the (k1, k4)-distance matrix M (consisting of characters '0' and '1'). Each row of M can be viewed as a string, and M can be viewed as a set of binary strings satisfying C1 and C4 constraints.
Inputs:
+ n: the number of DNA strings to generate
+ k1: parameter of C1 constraint
+ k4: parameter of C4 constraint
Output:
+ M: a BINARY (k1, k4)-distance matrix M with n rows
"""
initlen = algo_subroutine.initlength14(n, k1, k4)
# Precompute the prefix sum of computations, which will be used a lot later
pascalprefixsum = helper.generate_prefix_sum_pascal_triangle(initlen)
# Find the minimum length that satisfies Lemma 3 of paper (1) by Binary Search
minL = algo_subroutine.binarysearchlength14(n, k1, k4, initlen, pascalprefixsum)
M = genbinword14sub(n, minL, k1, k4, pascalprefixsum)
return M
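# Illustrative sketch added for exposition (not part of the original module).
# It assumes the usual reading of constraint C1 in paper (1): every pair of
# distinct words differs in at least k1 positions (Hamming distance >= k1).
def _example_check_c1_distance(M, k1):
    """Return True if every pair of distinct rows of M has Hamming distance >= k1."""
    n = len(M)
    for i in xrange(n):
        for j in xrange(i + 1, n):
            if sum(1 for a, b in zip(M[i], M[j]) if a != b) < k1:
                return False
    return True
# Example call (illustrative): _example_check_c1_distance(genbinword14(5, 3, 3), 3)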
######################################################################################
# gendnaword14(n, maptypetoparam):
#
# Detail:
# 1) Generate a BINARY (k1, k4) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(k1) and C4(k4)
# 2) For each entry in M, change '0' to 'C' and '1' to 'G'.
# 3) Then the list {W(0), ..., W(n - 1)} (W(i) is a string formed by the i-th row of M)
# is a list of DNA words satisfying C1 to C4
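# Illustrative example (added for exposition): a row ['0', '1', '1', '0'] of M
# becomes the DNA word 'CGGC' after step 2.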
def gendnaword14(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 and C4 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1 and 4 as keys
Output:
+ strlist: a Python list of DNA words satisfying C1 and C4 constraints
Example: to generate a set of 15 DNA words satisfying C1(8) and C4(9), call the function
gendnaword14(15, {1 : 8, 4 : 9 })
"""
if n <= 0:
return []
M = genbinword14(n, maptypetoparam[1], maptypetoparam[4])
helper.change_char_in_mat(M, range(len(M[0])), {'0': 'C', '1': 'G'})
return helper.convert_mat_to_strlist(M, n)
######################################################################################
# gendnaword1to6(n, maptypetoparam):
#
# Implementation is based on Lemma 12 in paper (1)
#
# Detail: (Note that k(i) = maptypetoparam[i])
# 1) Generate a BINARY (k1, k4) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(k1) and C4(k4)
# 2) For each entry in M, change '0' to 'A' and '1' to 'T'.
# 3) Let k = max{k2, k3, k5, k6}. Add k copies of 'C' at the beginning of each
# word formed by each row i in M, called such word W(i).
# Then the list {W(0), ..., W(n - 1)} is a list of DNA words satisfying C1 to C6
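# Illustrative example (added for exposition): with k2 = 4, k3 = 5, k5 = 8 and
# k6 = 10 (as in the docstring example below), k = 10, so a row 'ATTA...' of M
# becomes 'CCCCCCCCCC' + 'ATTA...'.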
def gendnaword1to6(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 and C6 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 6 as keys
Output:
+ strlist: a Python list of DNA words satisfying C1 and C6 constraints
Example: to generate a set of 25 DNA words satisfying C1(8), C2(4), C3(5), C4(7), C5(8), C6(10), call the function
gendnaword1to6(25, {1 : 8, 2 : 4, 3 : 5, 4 : 7, 5 : 8, 6 : 10})
"""
if n <= 0:
return []
M = genbinword14(n, maptypetoparam[1], maptypetoparam[4])
# In M, change '0' to 'A' and '1' to 'T'
helper.change_char_in_mat(M, range(len(M[0])), {'0': 'A', '1' : 'T'})
k = max(maptypetoparam[2], maptypetoparam[3], maptypetoparam[5], maptypetoparam[6])
leadingstr = 'C' * k
# Append k = max(k2, k3, k5, k6) to the start of each row in M
strlist = []
for row in xrange(n):
strlist.append(leadingstr + ''.join(M[row]))
return strlist
######################################################################################
# gendnaword1to7(n, maptypetoparam):
#
# Implementation is based on Lemma 14 in paper (1)
#
# Detail:
# 1) Generate a BINARY (k1, k4) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(k1) and C4(k4)
# 2) Let k = max{k2, k3, k5, k6} (k(i) = maptypetoparam[i]).
# For each row of M, add k copies of '1' at the beginning and k copies of '1'
# at the end.
# 3) Let l be the new length of each row in M now. Let gamma = maptypetoparam[7].
# Assume 0 <= gamma <= 1. Choose randomly a subset of size ceil(gamma * l) of
# the set {0, ..., l - 1}, let it be P
# 4) For each row of M, for positions in P, change '0' to 'C' and '1' to 'G'.
# For the remaining positions NOT in P, change '0' to 'A' and '1' to 'T'
# 5) The set of strings formed by rows in the new M satisfies C1 to C7.
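# Illustrative example (added for exposition): if the padded length is l = 20
# and gamma = 0.7, then ceil(0.7 * 20) = 14 randomly chosen positions are mapped
# to 'C'/'G' and the other 6 to 'A'/'T', so each word has GC-content 0.7.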
def gendnaword1to7(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 and C7 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 7 as keys.
Outputs:
+ strlist: a Python list of DNA words satisfying C1 and C7 constraints
Example: to generate a set of 25 DNA words satisfying C1(8), C2(4), C3(5), C4(7), C5(8), C6(10), C7(0.7), call the function
gendnaword1to7(25, {1 : 8, 2 : 4, 3 : 5, 4 : 7, 5 : 8, 6 : 10, 7 : 0.7})
"""
if n <= 0:
return []
if maptypetoparam[7] > 1:
return []
M = genbinword14(n, maptypetoparam[1], maptypetoparam[4])
k = max(maptypetoparam[2], maptypetoparam[3], maptypetoparam[5], maptypetoparam[6])
l = len(M[0]) + k + k
chosencolumn = []
if int(maptypetoparam[7]) == 1:
chosencolumn = range(l)
else:
chosencolumn = helper.choose_random_pos_list(l, int(math.ceil(maptypetoparam[7] * l)))
allcolumn = range(l)
strlist = []
for row in xrange(n):
# Append k instances of '1' at the beginning and the end of each row in M
newlist = ['1'] * k
newlist.extend(M[row])
newlist.extend(['1'] * k)
helper.change_char_in_mat([newlist], chosencolumn, {'0': 'C', '1': 'G'})
helper.change_char_in_mat([newlist], allcolumn, {'0': 'A', '1': 'T'})
strlist.append(''.join(newlist))
return strlist
######################################################################################
# gendnaword12378(n, maptypetoparam):
#
# Implementation is based on Lemma 16 in paper (1)
#
# Detail: (Note that k(i) = maptypetoparam[i])
# 1) Generate a BINARY (k1, 1) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(k1) and C4(1). This is essentially equivalent to the fact that each
# row of M satisfy C1(k1) only.
# 2) Let l0 be the number of columns in M. If l0 is odd, we append '0' at the
# end of each row in M.
# 3) Let k = max(k2, k3).
# For each row of M, add k copies of '1' at the beginning and k copies of '1'
# at the end.
# 4) Let S be a list of strings formed by rows in M. Apply breakrun function to
# each string in S so that all strings in S do not have runs of the same
# characters longer than maptypetoparam[8].
# 5) Let l be the new length of each string in S after Step 4. Let gamma = maptypetoparam[7].
# Assume 0 <= gamma <= 1. Choose randomly a subset of size ceil(gamma * l) of
# the set {0, ..., l - 1}, let it be P
# 6) For each string in S, for positions in P, change '0' to 'C' and '1' to 'G'.
# For the remaining positions NOT in P, change '0' to 'A' and '1' to 'T'
# 7) The new list S contains DNA words satisfying C1, C2, C3, C7 and C8 constraints.
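# Illustrative example (added for exposition): if the matrix from step 1 has
# l0 = 7 columns (odd), a '0' is appended so every row has even length 8; with
# k2 = 4 and k3 = 5, k = 5 copies of '1' are then added at both ends before
# breakrun is applied.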
def gendnaword12378(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1, C2, C3, C7 and C8 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, 3, 7 and 8 as keys.
Output:
+ strlist: a Python list of DNA words satisfying C1, C2, C3, C7 and C8 constraints
Example: to generate a set of 50 DNA words satisfying C1(8), C2(4), C3(5), C7(0.7), C8(3), call the function
gendnaword12378(50, {1 : 8, 2 : 4, 3 : 5, 7 : 0.7, 8 : 3})
"""
if n <= 0:
return []
if maptypetoparam[7] > 1:
return []
M = genbinword14(n, maptypetoparam[1], 1)
l0 = len(M[0])
if l0 & 1:
# If l0 is odd, append '0' at the end of every word in M so that
# the new length is even
for row in xrange(n):
M[row].append('0')
strlist = []
k = max(maptypetoparam[2], maptypetoparam[3])
for row in xrange(n):
newlist = ['1'] * k
newlist.extend(M[row])
newlist.extend(['1'] * k)
strlist.append(''.join(newlist))
# Break run
for strid in xrange(n):
strlist[strid] = algo_subroutine.breakrun(strlist[strid], maptypetoparam[8])
newlen = len(strlist[0])
chosencolumn = []
allcolumn = range(newlen)
if int(maptypetoparam[7]) == 1:
chosencolumn = range(newlen)
else:
chosencolumn = helper.choose_random_pos_list(newlen, int(math.ceil(maptypetoparam[7] * newlen)))
for strid in xrange(n):
curlist = list(strlist[strid])
helper.change_char_in_mat([curlist], chosencolumn, {'0': 'C', '1': 'G'})
helper.change_char_in_mat([curlist], allcolumn, {'0': 'A', '1':'T'})
strlist[strid] = ''.join(curlist)
return strlist
#######################################################################################
# gendnaword1to8(n, maptypetoparam):
#
# Implementation is a combination of
# + gendnaword1to8algo2 function (implemented based on Lemma 20 and Theorem 21 in paper (1))
# + gendnaword1to8algo1 function (implemented based on Lemma 18 and Theorem 19 in paper (1))
#
# Description:
# + If 1 / (d + 1) <= gamma <= d / (d + 1), we will use gendnaword1to8algo1 to generate
# the set of DNA words. It is because generally gendnaword1to8algo1 produces shorter words
# + Otherwise, if d >= 2, we will use gendnaword1to8algo2
# + Otherwise, a RuntimeError will be thrown
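# Illustrative example (added for exposition): with d = maptypetoparam[8] = 3
# the interval is [1/4, 3/4], so gamma = 0.7 is handled by gendnaword1to8algo1,
# while gamma = 0.9 lies outside the interval and is handled by
# gendnaword1to8algo2.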
def gendnaword1to8(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 through C8 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 8 as keys.
Output:
+ strlist: a Python list of DNA words satisfying C1 through C8 constraints
Example: to generate a set of 50 DNA words satisfying C1(8), C2(4), C3(5), C4(7), C5(8), C6(10), C7(0.7), C8(3), call the function
gendnaword1to8(50, {1 : 8, 2 : 4, 3 : 5, 4 : 7, 5 : 8, 6 : 10, 7 : 0.7, 8 : 3})
"""
if n <= 0:
return []
gamma = maptypetoparam[7]
if gamma > 1:
return []
d = maptypetoparam[8]
if d < 2:
raise RuntimeError("gendnaword1to8algo2 only works with maxlenrun >= 2")
if (1.0 / (d + 1) > gamma) or (gamma > d * 1.0 / (d + 1)):
return gendnaword1to8algo2(n, maptypetoparam)
return gendnaword1to8algo1(n, maptypetoparam)
########################################################################################
# gendnaword1to8algo2(n, maptypetoparam):
#
# The implementation is based on Lemma 20 and Theorem 21 in paper (1)
#
# Detail: (Note that k(i) = maptypetoparam[i] and d = maptypetoparam[8])
# 1) Generate a BINARY (max(k1, k4), 1) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(max(k1, k4)) and C4(1). This is essentially equivalent to the fact that each
# row of M satisfy C1(max(k1, k4)) only.
# 2) For each row in M, after every d - 1 characters, or when we reach the last character
# of a row, insert a new character which is the complementary ('0' is complementary to '1',
# and vice versa) to the immediate previous character.
# 3) Add 1 copy of '1' at the beginning and 1 copy of '0' at the end of each row in M.
# 4) Let k = max{k2, k3, k4, k5, k6}. For each row of M, add
# ceil(k / d) copies of the length-d string 11...10 at the beginning and the end of each
# row in M.
# 5) Let l be the new length of each row in M now (or the number of columns).
# Let gamma = maptypetoparam[7]. Assume 0 <= gamma <= 1.
# Choose randomly a subset of size ceil(gamma * l) of the set {0, ..., l - 1}, let it be P
# 6) For each row of M, for positions in P, change '0' to 'C' and '1' to 'G'.
# For the remaining positions NOT in P, change '0' to 'A' and '1' to 'T'
# 7) The list of strings formed by rows in the new M satisfies C1 through C8.
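# Worked sketch of steps 2-3 (added for exposition): with d = 3, a row '11011'
# becomes '11001010' after inserting the complement of the previous character
# after every d - 1 = 2 characters (and after the last one); prepending '1' and
# appending '0' then gives '1110010100', in which no run is longer than d = 3.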
def gendnaword1to8algo2(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 through C8 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 8 as keys.
Output:
+ strlist: a Python list of DNA words satisfying C1 through C8 constraints
Exception:
+ A RuntimeError will be raised if maptypetoparam[8] < 2
"""
if n <= 0:
return []
if maptypetoparam[7] > 1:
return []
if maptypetoparam[8] < 2:
raise RuntimeError("gendnaword1to8algo2 only works with maxlenrun >= 2")
# Generate the set of strings satisfies C1 constraint
M = genbinword14(n, max(maptypetoparam[1], maptypetoparam[4]), 1)
k = max([maptypetoparam[i] for i in xrange(2, 6 + 1)])
newM = []
l0 = len(M[0])
# Prepare the 'string' (list of characters) used later
baselist = ['1'] * (maptypetoparam[8] - 1)
baselist.append('0')
numtime = int(math.ceil(k * 1.0 / maptypetoparam[8]))
supplist = baselist * numtime
for row in xrange(n):
newrow = []
newrow.extend(supplist)
newrow.append('1')
sublen = 0
for ind in xrange(l0):
newrow.append(M[row][ind])
sublen += 1
if (sublen == maptypetoparam[8] - 1) or (ind == l0 - 1):
newrow.append(helper.get_complement_letter(M[row][ind]))
sublen = 0
newrow.append('0')
newrow.extend(supplist)
newM.append(newrow)
newlen = len(newM[0])
allcolumn = range(newlen)
if maptypetoparam[7] == 1:
chosencolumn = range(newlen)
else:
chosencolumn = helper.choose_random_pos_list(newlen, int(math.ceil(maptypetoparam[7] * newlen)))
helper.change_char_in_mat(newM, chosencolumn, {'0': 'C', '1': 'G'})
helper.change_char_in_mat(newM, allcolumn, {'0': 'A', '1': 'T'})
return helper.convert_mat_to_strlist(newM, n)
########################################################################################
# gendnaword1to8algo1(n, maptypetoparam):
#
# Assumption: Let gamma = maptypetoparam[7] and d = maptypetoparam[8]. We must have
# 1 / (d + 1) <= gamma <= d / (d + 1)
# Otherwise, an RunTimeError will be raised
#
# The implementation is based on Lemma 18 and Theorem 19 in paper (1)
#
# Detail: (Note that d = maptypetoparam[8])
# 1) Generate a BINARY (max(k1, k4), 1) distance matrix M (consisting of characters
# '0' and '1'). Each row in M can be viewed as a string that satisfies
# C1(max(k1, k4)) and C4(1). This is essentially equivalent to the fact that each
# row of M satisfy C1(max(k1, k4)) only.
# 2) Let k = max{k2, k3, k4, k5, k6}.
# For each row of M, add k copies of '1' at the beginning and k copies of '1'
# at the end.
# 3) Let l be the new length of rows in M (or the number of columns in M).
# Partition the integer interval [0, l - 1] into subintervals Z(1), Z(2), ..., Z(s)
# for some s such that
# (1) Each subinterval consists of at most d integers and at least one integer
# (2) The total number of integers in the odd-indexed subintervals is
# floor(newgamma * l) where newgamma = max(gamma, 1 - gamma)
#
# The step is not straightforward to implement. So we describe how it is implemented in
# function
# + Let newgamma = max(gamma, 1 - gamma)
# + Since 1 / (d + 1) <= gamma <= d / (d + 1), we have
# 1/2 <= newgamma <= d / (d + 1)
# + Let numchoose = floor(newgamma * l) and numnotchoose = l - numchoose
# + Let q = floor(numchoose / numnotchoose) and r = numchoose % numnotchoose
# + It can be shown that 1 <= q <= d. And when q = d, r = 0
# + Instead of finding Z(1), ..., Z(s) explicitly, we just need to maintain 2 list
# - oddlist is the union odd-indexed subintervals Z(1), Z(3), ...
# - evenlist is the union of even-indexed subintervals Z(2), Z(4), ...
# + Starting with i = 0, we repeated the following procedure:
# - Add the next q integers including i (i.e. i, i + 1, ..., i + q - 1)
# or add until we reach l - 1 into oddlist
# - If we reach (add) l - 1, stop the procedure.
# - Otherwise, if r is positive, add the next integer, i.e. i + q into oddlist,
# and decrement r
# - Add the next integer (if it is less than l) into evenlist
# - Update i for the next iteration
#
# 4) For each row in M:
# (1) If gamma < 0.5:
# + For entries whose position is in an odd-indexed subinterval, change '0' to 'A',
# '1' to 'T'.
# + For entries whose position is in an even-indexed subinterval, change '0' to 'C',
# '1' to 'G'.
# (2) If gamma >= 0.5:
# + For entries whose position is in an odd-indexed subinterval, change '0' to 'C',
# '1' to 'G'.
# + For entries whose position is in an even-indexed subinterval, change '0' to 'A',
# '1' to 'T'.
# 5) The list of strings formed by rows in the new M satisfies C1 through C8.
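# Worked sketch of step 3 (added for exposition), assuming l = 10, gamma = 0.7
# and d = 3 (so that 1/(d+1) <= gamma <= d/(d+1) holds): newgamma = 0.7,
# numchoose = 7, numnotchoose = 3, q = 2 and r = 1, and the procedure yields
# oddlist = [0, 1, 2, 4, 5, 7, 8] and evenlist = [3, 6, 9]. Exactly 7 of the 10
# positions therefore receive 'C'/'G' (since gamma >= 0.5), and no subinterval
# holds more than 3 <= d integers.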
def gendnaword1to8algo1(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 through C8 constraints.
Inputs:
+ n: the number of strings to generate
+ maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 8 as keys.
Output:
+ strlist: a Python list of DNA words satisfying C1 through C8 constraints
Exception:
    + A RuntimeError will be raised if the following condition is NOT satisfied:
1 / (d + 1) <= gamma <= d / (d + 1)
where gamma = maptypetoparam[7] and d = maptypetoparam[8]
"""
gamma = maptypetoparam[7]
if n <= 0 or gamma > 1:
return []
d = maptypetoparam[8]
if (1.0 / (d + 1) > gamma) or (gamma > d * 1.0 / (d + 1)):
raise RuntimeError("gendnaword1to8algo1 works only if 1 / (d + 1) <= gamma <= d / (d + 1)")
    # Generate the set of strings satisfying the C1 constraint
M = genbinword14(n, max(maptypetoparam[1], maptypetoparam[4]), 1)
k = max([maptypetoparam[i] for i in xrange(2, 6 + 1)])
newM = []
for row in xrange(n):
newrow = []
newrow.extend(['1'] * k)
newrow.extend(M[row])
newrow.extend(['1'] * k)
newM.append(newrow)
# Find oddlist and evenlist as mentioned in Step 3 (see comments above)
newlen = len(newM[0])
newgamma = gamma
if newgamma < 0.5:
newgamma = 1 - newgamma
numchoose = int(newgamma * newlen)
numnotchoose = newlen - numchoose
minoddsize = int(numchoose * 1.0 / numnotchoose)
numleft = numchoose % numnotchoose
oddlist = []
evenlist = []
ind = 0
oddsize = 0
while ind < newlen:
oddlist.append(ind)
ind += 1
oddsize += 1
if ind < newlen and oddsize == minoddsize:
if numleft != 0:
oddlist.append(ind)
numleft -= 1
ind += 1
if ind < newlen:
evenlist.append(ind)
ind += 1
oddsize = 0
# Convert binary words into DNA words
if gamma < 0.5:
helper.change_char_in_mat(newM, oddlist, {'0': 'A', '1': 'T'})
helper.change_char_in_mat(newM, evenlist, {'0': 'C', '1': 'G'})
else:
helper.change_char_in_mat(newM, oddlist, {'0': 'C', '1': 'G'})
helper.change_char_in_mat(newM, evenlist, {'0': 'A', '1': 'T'})
return helper.convert_mat_to_strlist(newM, n)
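# Illustrative usage sketch (not part of the original module): the parameter values
# below are assumptions, chosen only so that the required condition
# 1 / (d + 1) <= gamma <= d / (d + 1) holds (here gamma = 0.5 and d = 2).
def _example_gendnaword1to8algo1():
    # Generate 10 DNA words satisfying C1(3), C2(2), C3(2), C4(3), C5(2), C6(2),
    # C7(0.5) and C8(2)
    params = {1: 3, 2: 2, 3: 2, 4: 3, 5: 2, 6: 2, 7: 0.5, 8: 2}
    return gendnaword1to8algo1(10, params)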
##############################################################################################
# gendnaword1to6And9(n, maptypetoparam):
#
# Refer to gendnaword1to6and9generic comments
#
# The function we use to construct bounded energy DNA strings here is
# free_energy_routine.construct_bounded_energy_dna_list, which employs a dynamic
# programming approach
#
def gendnaword1to6and9(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 to C6 with specified parameters, and C9(4D + Gamma_max) constraints where
- Gamma_max and Gamma_min are the largest and smallest among 16 entries of maptypetoparam[9].
- D = Gamma_max - Gamma_min
In this implementation, we use dynamic programming to solve the bounded energy strand generation problem.
See gendnaword1to6and9generic for more information
"""
return gendnaword1to6and9generic(n, maptypetoparam, free_energy_routine.construct_bounded_energy_dna_list)
##############################################################################################
# gendnaword1to6and9poly(n, maptypetoparam)
#
# Refer to gendnaword1to6and9generic comments
#
# The function we use to construct bounded energy DNA strings here is
# construct_bounded_energy_dna_list_poly, which employs a polynomial multiplication and
# generating function approach
#
def gendnaword1to6and9poly(n, maptypetoparam):
"""
Generate and return a set of DNA words satisfying C1 to C6 with specified parameters, and C9(4D + Gamma_max) constraints where
- Gamma_max and Gamma_min are the largest and smallest among 16 entries of maptypetoparam[9].
- D = Gamma_max - Gamma_min
In this implementation, we use an approach of polynomial multiplication and generating function to solve the bounded energy strand generation problem.
See gendnaword1to6and9generic for more information
"""
return gendnaword1to6and9generic(n, maptypetoparam, free_energy_routine.construct_bounded_energy_dna_list_poly)
##############################################################################################
# gendnaword1to6and9generic(n, maptypetoparam, construct_bounded_energy_dna):
# + Generate and return a set of DNA words satisfying C1 to C6, and C9 constraints.
#
# Input specification:
# + n: the number of strings to generate
# + maptypetoparam: a dictionary that maps an integer (representing
# the constraint type) to the parameter corresponding to that
# constraint type.
# In this function, maptypetoparam must contain
# maptypetoparam[i] = k(i) (an integer for C(i) constraint)
# for i = 1, ..., 6
#
# maptypetoparam[9]: is a 4 x 4 matrix where rows and columns are indexed
#   by the list ['A', 'C', 'G', 'T'] in that order, representing the pairwise free energy values.
# For example, if
#      maptypetoparam[9] = M = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]]
# Then the free energy between 'T' and 'T' is M[3][3] = 16, and
# the free energy between 'A' and 'G' is M[0][2] = 3, and
# the free energy between 'G' and 'A' is M[2][0] = 9, etc.
# All 16 entries of maptypetoparam[9] should be non-negative integers!
#
# + construct_bounded_energy_dna: a function to generate a list of strings, each with a specified
#   length and with free energy bounded within a specified interval.
#
# Implementation is based on Lemma 5, 6, 7, 8 of paper (2)
#
# Detail:
# 1) Generate a list S of DNA words satisfying C1 through C6 constraints by calling gendnaword1to6
# 2) Let femax and femin be the largest and smallest free energy of DNA words in S.
# 3) If femax - femin <= 4 * D + Gamma_max, return S.
# 4) Otherwise, let l be the length of strings in S.
# Find feminstr which is the minimum free energy among all DNA words of length 2l
# 5) Let alpha = femax + feminstr, and beta = alpha + 2 * D. For each string S(i) in S, construct
#    a DNA word T(i) with length 2l whose free energy is between alpha - FE(S(i)) and beta - FE(S(i))
# (FE(W) denotes the free energy of a word W). This can be done by calling
# construct_bounded_energy_dna
# 6) Let U(i) be the string constructed by concatenating T(i)[0 ... l - 1] (the first half of T(i))
# and S(i) and T(i)[l ... 2l - 1] (the second half of T(i))
# 7) The list of strings U(0), ..., U(n - 1) satisfies C1 through C6 constraints, and
# C9(4 * D + Gamma_max)
def gendnaword1to6and9generic(n, maptypetoparam, construct_bounded_energy_dna):
"""
Generate and return a set of DNA words satisfying C1 to C6 with specified parameters, and C9(4D + Gamma_max) constraints where
- Gamma_max and Gamma_min are the largest and smallest among 16 entries of maptypetoparam[9].
- D = Gamma_max - Gamma_min
Inputs:
+ n: the number of strings to generate
    + maptypetoparam: a dictionary that maps an integer (representing the constraint type) to the parameter corresponding to that constraint type. It must have 1, 2, ..., 6 and 9 as keys. Note that maptypetoparam[9] is a 4 x 4 matrix of non-negative integers where rows and columns are indexed by typeArr = ['A', 'C', 'G', 'T'] in that order, representing the pairwise free energy values.
    + construct_bounded_energy_dna: a function to solve the bounded energy strand generation problem. It generates a list of strings, each with a specified length and with free energy bounded within a specified interval.
Output:
+ strlist: a Python list of DNA words satisfying C1 through C6 constraints, and C9(4D + Gamma_max)
Example:
+ The input pairwise free energy matrix M is given by
M = [[5, 4, 6, 1], [2, 10, 3, 4], [6, 11, 5, 8], [1, 3, 4, 8]].
+ To generate a list of 95 DNA words satisfying C1(3), C2(6), C3(2), C4(7), C5(8), C6(9) and C9 constraint, call the function:
strlist = gen_dna_words(95, {1: 3, 2: 6, 3: 2, 4: 7, 5: 8, 6: 9, 9: M})
"""
# Generate a set of words satisfying C1 to C6 constraint
strlist16 = gendnaword1to6(n, maptypetoparam)
    # Create the pairwise energy function from the 4 x 4 matrix
pairwiseenergy = free_energy_routine.create_pairwise_energy_func(maptypetoparam[9])
maxpairwise = free_energy_routine.find_extreme_pairwise_energy(pairwiseenergy, max)
minpairwise = free_energy_routine.find_extreme_pairwise_energy(pairwiseenergy, min)
D = maxpairwise - minpairwise
# Compute the free energy of each string
freeenergylist = [free_energy_routine.compute_free_energy(strlist16[i], pairwiseenergy) for i in xrange(n)]
femax = max(freeenergylist)
femin = min(freeenergylist)
if femax - femin <= 4 * D + maxpairwise:
return strlist16
newlen = len(strlist16[0])
newlen += newlen
alpha = femax + free_energy_routine.compute_min_free_energy(newlen, pairwiseenergy)
beta = alpha + D + D
triplelist = [(newlen, alpha - freeenergylist[i], beta - freeenergylist[i]) for i in xrange(n)]
# Find the list of bounded energy DNA strings
boundedestrlist = construct_bounded_energy_dna(triplelist, pairwiseenergy)
strlist = []
halflen = newlen / 2
for ind in xrange(n):
newstr = boundedestrlist[ind][0 : halflen] + strlist16[ind] + boundedestrlist[ind][halflen : newlen]
strlist.append(newstr)
return strlist
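# Illustrative usage sketch (not part of the original module), mirroring the docstring
# example above: the free energy matrix and constraint parameters are assumptions, with
# the matrix rows and columns indexed by ['A', 'C', 'G', 'T'].
def _example_gendnaword1to6and9():
    fe_matrix = [[5, 4, 6, 1], [2, 10, 3, 4], [6, 11, 5, 8], [1, 3, 4, 8]]
    params = {1: 3, 2: 6, 3: 2, 4: 7, 5: 8, 6: 9, 9: fe_matrix}
    # gendnaword1to6and9 uses the dynamic programming solver;
    # gendnaword1to6and9poly(10, params) would use the generating function approach instead.
    return gendnaword1to6and9(10, params)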
| truongduy134/DNA-Word-Design | src/algo_routine.py | Python | gpl-2.0 | 32,425 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_concurrency import processutils
from oslo_service import service
from oslo_service import wsgi
from ironic.api import app
from ironic.common import exception
from ironic.common.i18n import _
from ironic.conf import CONF
class WSGIService(service.ServiceBase):
"""Provides ability to launch ironic API from wsgi app."""
def __init__(self, name, use_ssl=False):
"""Initialize, but do not start the WSGI server.
:param name: The name of the WSGI server given to the loader.
:param use_ssl: Wraps the socket in an SSL context if True.
:returns: None
"""
self.name = name
self.app = app.VersionSelectorApplication()
self.workers = (CONF.api.api_workers or
processutils.get_worker_count())
if self.workers and self.workers < 1:
raise exception.ConfigInvalid(
_("api_workers value of %d is invalid, "
"must be greater than 0.") % self.workers)
self.server = wsgi.Server(CONF, name, self.app,
host=CONF.api.host_ip,
port=CONF.api.port,
use_ssl=use_ssl)
def start(self):
"""Start serving this service using loaded configuration.
:returns: None
"""
self.server.start()
def stop(self):
"""Stop serving this API.
:returns: None
"""
self.server.stop()
def wait(self):
"""Wait for the service to stop serving this API.
:returns: None
"""
self.server.wait()
def reset(self):
"""Reset server greenpool size to default.
:returns: None
"""
self.server.reset()
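# Illustrative launch sketch (not part of the original module): it assumes the ironic
# configuration has already been parsed, and uses oslo.service's launcher to run the
# WSGI server with the worker count computed in __init__.
def _example_launch_api_service():
    from oslo_service import service as os_service
    server = WSGIService('ironic_api')
    launcher = os_service.launch(CONF, server, workers=server.workers)
    launcher.wait()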
| SauloAislan/ironic | ironic/common/wsgi_service.py | Python | apache-2.0 | 2,316 |
from django.contrib import admin
from cl.stats.models import Stat, Event
@admin.register(Stat)
class StatAdmin(admin.ModelAdmin):
fields = ('name', 'date_logged', 'count')
@admin.register(Event)
class EventAdmin(admin.ModelAdmin):
list_display = ('__str__', 'user', 'date_created', 'description')
list_filter = ('user',)
readonly_fields = ('date_created',)
date_hierarchy = 'date_created'
ordering = ('-date_created',)
search_fields = ('id', 'description', 'user__username')
| voutilad/courtlistener | cl/stats/admin.py | Python | agpl-3.0 | 508 |
import sys
import os
TEST_DIR = sys.path[0]
sys.path.insert(0,os.getcwd())
import unittest
from StockGainCalculatorApp import StockGainCalculatorGui
from transaction import Transaction
import Tkinter
from Tkinter import Tk
from decimal import Decimal
from datetime import datetime
class StockGainCalculatorAppTest(unittest.TestCase):
def setUp(self):
win = Tk()
win.title('StockGainCalculator Testing')
self.app = StockGainCalculatorGui(win)
self.app.pack(padx=2,pady=2,expand=Tkinter.YES,fill=Tkinter.BOTH)
def testAddTransaction(self):
app = self.app
app.clear_transactions()
t1 = Transaction("xom",True,100,Decimal(61), Decimal(6200), datetime(2010,1,1,12,30,1))
t2 = Transaction("xom",False,100,Decimal(65), Decimal(6400), datetime(2010,1,1,13,30,1))
app.add_transaction(t1)
app.add_transaction(t2)
app.process_transactions()
self.assertEqual(Decimal(200), app.get_short_term_gain(2010))
self.assertEqual(Decimal(0),app.get_long_term_gain(2010))
self.assertEqual(Decimal(0), app.get_short_term_gain(2009))
if __name__ == '__main__':
unittest.main()
| hermantai/beta-programs | StockGainCalculator/tests/test_StockGainCalculatorApp.py | Python | apache-2.0 | 1,181 |
import asyncio # noqa
import collections.abc # noqa
import datetime
import enum
import json
import math
import time
import warnings
import zlib
from concurrent.futures import Executor
from email.utils import parsedate
from http.cookies import SimpleCookie
from typing import ( # noqa
TYPE_CHECKING,
Any,
Dict,
Iterator,
Mapping,
MutableMapping,
Optional,
Tuple,
Union,
cast,
)
from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
from .helpers import HeadersMixin, rfc822_formatted_time, sentinel
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
__all__ = ('ContentCoding', 'StreamResponse', 'Response', 'json_response')
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest # noqa
BaseClass = MutableMapping[str, Any]
else:
BaseClass = collections.abc.MutableMapping
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
# Additional registered codings are listed at:
# https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
deflate = 'deflate'
gzip = 'gzip'
identity = 'identity'
############################################################
# HTTP Response classes
############################################################
class StreamResponse(BaseClass, HeadersMixin):
_length_check = True
def __init__(self, *,
status: int=200,
reason: Optional[str]=None,
headers: Optional[LooseHeaders]=None) -> None:
self._body = None
self._keep_alive = None # type: Optional[bool]
self._chunked = False
self._compression = False
self._compression_force = None # type: Optional[ContentCoding]
self._cookies = SimpleCookie()
self._req = None # type: Optional[BaseRequest]
self._payload_writer = None # type: Optional[AbstractStreamWriter]
self._eof_sent = False
self._body_length = 0
self._state = {} # type: Dict[str, Any]
if headers is not None:
self._headers = CIMultiDict(headers) # type: CIMultiDict[str]
else:
self._headers = CIMultiDict() # type: CIMultiDict[str]
self.set_status(status, reason)
@property
def prepared(self) -> bool:
return self._payload_writer is not None
@property
def task(self) -> 'asyncio.Task[None]':
return getattr(self._req, 'task', None)
@property
def status(self) -> int:
return self._status
@property
def chunked(self) -> bool:
return self._chunked
@property
def compression(self) -> bool:
return self._compression
@property
def reason(self) -> str:
return self._reason
def set_status(self, status: int,
reason: Optional[str]=None,
_RESPONSES: Mapping[int,
Tuple[str, str]]=RESPONSES) -> None:
assert not self.prepared, \
'Cannot change the response status code after ' \
'the headers have been sent'
self._status = int(status)
if reason is None:
try:
reason = _RESPONSES[self._status][0]
except Exception:
reason = ''
self._reason = reason
@property
def keep_alive(self) -> Optional[bool]:
return self._keep_alive
def force_close(self) -> None:
self._keep_alive = False
@property
def body_length(self) -> int:
return self._body_length
@property
def output_length(self) -> int:
warnings.warn('output_length is deprecated', DeprecationWarning)
assert self._payload_writer
return self._payload_writer.buffer_size
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None:
"""Enables automatic chunked transfer encoding."""
self._chunked = True
if hdrs.CONTENT_LENGTH in self._headers:
raise RuntimeError("You can't enable chunked encoding when "
"a content length is set")
if chunk_size is not None:
warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
def enable_compression(self,
force: Optional[Union[bool, ContentCoding]]=None
) -> None:
"""Enables response compression encoding."""
# Backwards compatibility for when force was a bool <0.17.
if type(force) == bool:
force = ContentCoding.deflate if force else ContentCoding.identity
warnings.warn("Using boolean for force is deprecated #3318",
DeprecationWarning)
elif force is not None:
            assert isinstance(force, ContentCoding), ("force should be one of "
                                                      "None, bool or "
                                                      "ContentCoding")
self._compression = True
self._compression_force = force
@property
def headers(self) -> 'CIMultiDict[str]':
return self._headers
@property
def cookies(self) -> SimpleCookie:
return self._cookies
def set_cookie(self, name: str, value: str, *,
expires: Optional[str]=None,
domain: Optional[str]=None,
max_age: Optional[Union[int, str]]=None,
path: str='/',
secure: Optional[str]=None,
httponly: Optional[str]=None,
version: Optional[str]=None) -> None:
"""Set or update response cookie.
Sets new cookie or updates existent with new value.
Also updates only those params which are not None.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == '':
# deleted cookie
self._cookies.pop(name, None)
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c['expires'] = expires
elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT':
del c['expires']
if domain is not None:
c['domain'] = domain
if max_age is not None:
c['max-age'] = str(max_age)
elif 'max-age' in c:
del c['max-age']
c['path'] = path
if secure is not None:
c['secure'] = secure
if httponly is not None:
c['httponly'] = httponly
if version is not None:
c['version'] = version
def del_cookie(self, name: str, *,
domain: Optional[str]=None,
path: str='/') -> None:
"""Delete cookie.
Creates new empty expired cookie.
"""
# TODO: do we need domain/path here?
self._cookies.pop(name, None)
self.set_cookie(name, '', max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain, path=path)
@property
def content_length(self) -> Optional[int]:
# Just a placeholder for adding setter
return super().content_length
@content_length.setter
def content_length(self, value: Optional[int]) -> None:
if value is not None:
value = int(value)
if self._chunked:
            raise RuntimeError("You can't set content length when "
                               "chunked encoding is enabled")
self._headers[hdrs.CONTENT_LENGTH] = str(value)
else:
self._headers.pop(hdrs.CONTENT_LENGTH, None)
@property
def content_type(self) -> str:
# Just a placeholder for adding setter
return super().content_type
@content_type.setter
def content_type(self, value: str) -> None:
self.content_type # read header values if needed
self._content_type = str(value)
self._generate_content_type_header()
@property
def charset(self) -> Optional[str]:
# Just a placeholder for adding setter
return super().charset
@charset.setter
def charset(self, value: Optional[str]) -> None:
ctype = self.content_type # read header values if needed
if ctype == 'application/octet-stream':
raise RuntimeError("Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first")
assert self._content_dict is not None
if value is None:
self._content_dict.pop('charset', None)
else:
self._content_dict['charset'] = str(value).lower()
self._generate_content_type_header()
@property
def last_modified(self) -> Optional[datetime.datetime]:
"""The value of Last-Modified HTTP header, or None.
This header is represented as a `datetime` object.
"""
httpdate = self._headers.get(hdrs.LAST_MODIFIED)
if httpdate is not None:
timetuple = parsedate(httpdate)
if timetuple is not None:
return datetime.datetime(*timetuple[:6],
tzinfo=datetime.timezone.utc)
return None
@last_modified.setter
def last_modified(self,
value: Optional[
Union[int, float, datetime.datetime, str]]) -> None:
if value is None:
self._headers.pop(hdrs.LAST_MODIFIED, None)
elif isinstance(value, (int, float)):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)))
elif isinstance(value, datetime.datetime):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple())
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value
def _generate_content_type_header(
self,
CONTENT_TYPE: istr=hdrs.CONTENT_TYPE) -> None:
assert self._content_dict is not None
assert self._content_type is not None
params = '; '.join("{}={}".format(k, v)
for k, v in self._content_dict.items())
if params:
ctype = self._content_type + '; ' + params
else:
ctype = self._content_type
self._headers[CONTENT_TYPE] = ctype
async def _do_start_compression(self, coding: ContentCoding) -> None:
if coding != ContentCoding.identity:
assert self._payload_writer is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._payload_writer.enable_compression(coding.value)
# Compressed payload may have different content length,
# remove the header
self._headers.popall(hdrs.CONTENT_LENGTH, None)
async def _start_compression(self, request: 'BaseRequest') -> None:
if self._compression_force:
await self._do_start_compression(self._compression_force)
else:
accept_encoding = request.headers.get(
hdrs.ACCEPT_ENCODING, '').lower()
for coding in ContentCoding:
if coding.value in accept_encoding:
await self._do_start_compression(coding)
return
async def prepare(
self,
request: 'BaseRequest'
) -> Optional[AbstractStreamWriter]:
if self._eof_sent:
return None
if self._payload_writer is not None:
return self._payload_writer
await request._prepare_hook(self)
return await self._start(request)
async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter:
self._req = request
keep_alive = self._keep_alive
if keep_alive is None:
keep_alive = request.keep_alive
self._keep_alive = keep_alive
version = request.version
writer = self._payload_writer = request._payload_writer
headers = self._headers
for cookie in self._cookies.values():
value = cookie.output(header='')[1:]
headers.add(hdrs.SET_COOKIE, value)
if self._compression:
await self._start_compression(request)
if self._chunked:
if version != HttpVersion11:
raise RuntimeError(
"Using chunked encoding is forbidden "
"for HTTP/{0.major}.{0.minor}".format(request.version))
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = 'chunked'
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
elif self._length_check:
writer.length = self.content_length
if writer.length is None:
if version >= HttpVersion11:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = 'chunked'
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
else:
keep_alive = False
headers.setdefault(hdrs.CONTENT_TYPE, 'application/octet-stream')
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
# connection header
if hdrs.CONNECTION not in headers:
if keep_alive:
if version == HttpVersion10:
headers[hdrs.CONNECTION] = 'keep-alive'
else:
if version == HttpVersion11:
headers[hdrs.CONNECTION] = 'close'
# status line
status_line = 'HTTP/{}.{} {} {}'.format(
version[0], version[1], self._status, self._reason)
await writer.write_headers(status_line, headers)
return writer
async def write(self, data: bytes) -> None:
assert isinstance(data, (bytes, bytearray, memoryview)), \
"data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
raise RuntimeError("Cannot call write() after write_eof()")
if self._payload_writer is None:
raise RuntimeError("Cannot call write() before prepare()")
await self._payload_writer.write(data)
async def drain(self) -> None:
assert not self._eof_sent, "EOF has already been sent"
assert self._payload_writer is not None, \
"Response has not been started"
warnings.warn("drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2)
await self._payload_writer.drain()
async def write_eof(self, data: bytes=b'') -> None:
assert isinstance(data, (bytes, bytearray, memoryview)), \
"data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
return
assert self._payload_writer is not None, \
"Response has not been started"
await self._payload_writer.write_eof(data)
self._eof_sent = True
self._req = None
self._body_length = self._payload_writer.output_size
self._payload_writer = None
def __repr__(self) -> str:
if self._eof_sent:
info = "eof"
elif self.prepared:
assert self._req is not None
info = "{} {} ".format(self._req.method, self._req.path)
else:
info = "not prepared"
return "<{} {} {}>".format(self.__class__.__name__,
self.reason, info)
def __getitem__(self, key: str) -> Any:
return self._state[key]
def __setitem__(self, key: str, value: Any) -> None:
self._state[key] = value
def __delitem__(self, key: str) -> None:
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str]:
return iter(self._state)
def __hash__(self) -> int:
return hash(id(self))
def __eq__(self, other: object) -> bool:
return self is other
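# Illustrative usage sketch (not part of the original module): a request handler that
# streams its body in chunks with StreamResponse; the chunk contents are assumptions.
async def _example_stream_handler(request):
    resp = StreamResponse()
    await resp.prepare(request)
    await resp.write(b'hello, ')
    await resp.write(b'world')
    await resp.write_eof()
    return resp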
class Response(StreamResponse):
def __init__(self, *,
body: Any=None,
status: int=200,
reason: Optional[str]=None,
text: Optional[str]=None,
headers: Optional[LooseHeaders]=None,
content_type: Optional[str]=None,
charset: Optional[str]=None,
zlib_executor_size: Optional[int]=None,
zlib_executor: Executor=None) -> None:
if body is not None and text is not None:
raise ValueError("body and text are not allowed together")
if headers is None:
real_headers = CIMultiDict() # type: CIMultiDict[str]
elif not isinstance(headers, CIMultiDict):
real_headers = CIMultiDict(headers)
else:
real_headers = headers # = cast('CIMultiDict[str]', headers)
if content_type is not None and "charset" in content_type:
raise ValueError("charset must not be in content_type "
"argument")
if text is not None:
if hdrs.CONTENT_TYPE in real_headers:
if content_type or charset:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
else:
# fast path for filling headers
if not isinstance(text, str):
raise TypeError("text argument must be str (%r)" %
type(text))
if content_type is None:
content_type = 'text/plain'
if charset is None:
charset = 'utf-8'
real_headers[hdrs.CONTENT_TYPE] = (
content_type + '; charset=' + charset)
body = text.encode(charset)
text = None
else:
if hdrs.CONTENT_TYPE in real_headers:
if content_type is not None or charset is not None:
raise ValueError("passing both Content-Type header and "
"content_type or charset params "
"is forbidden")
else:
if content_type is not None:
if charset is not None:
content_type += '; charset=' + charset
real_headers[hdrs.CONTENT_TYPE] = content_type
super().__init__(status=status, reason=reason, headers=real_headers)
if text is not None:
self.text = text
else:
self.body = body
self._compressed_body = None # type: Optional[bytes]
self._zlib_executor_size = zlib_executor_size
self._zlib_executor = zlib_executor
@property
def body(self) -> Optional[Union[bytes, Payload]]:
return self._body
@body.setter
def body(self, body: bytes,
CONTENT_TYPE: istr=hdrs.CONTENT_TYPE,
CONTENT_LENGTH: istr=hdrs.CONTENT_LENGTH) -> None:
if body is None:
self._body = None # type: Optional[bytes]
self._body_payload = False # type: bool
elif isinstance(body, (bytes, bytearray)):
self._body = body
self._body_payload = False
else:
try:
self._body = body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
raise ValueError('Unsupported body type %r' % type(body))
self._body_payload = True
headers = self._headers
# set content-length header if needed
if not self._chunked and CONTENT_LENGTH not in headers:
size = body.size
if size is not None:
headers[CONTENT_LENGTH] = str(size)
# set content-type
if CONTENT_TYPE not in headers:
headers[CONTENT_TYPE] = body.content_type
# copy payload headers
if body.headers:
for (key, value) in body.headers.items():
if key not in headers:
headers[key] = value
self._compressed_body = None
@property
def text(self) -> Optional[str]:
if self._body is None:
return None
return self._body.decode(self.charset or 'utf-8')
@text.setter
def text(self, text: str) -> None:
assert text is None or isinstance(text, str), \
"text argument must be str (%r)" % type(text)
if self.content_type == 'application/octet-stream':
self.content_type = 'text/plain'
if self.charset is None:
self.charset = 'utf-8'
self._body = text.encode(self.charset)
self._body_payload = False
self._compressed_body = None
@property
def content_length(self) -> Optional[int]:
if self._chunked:
return None
if hdrs.CONTENT_LENGTH in self._headers:
return super().content_length
if self._compressed_body is not None:
# Return length of the compressed body
return len(self._compressed_body)
elif self._body_payload:
# A payload without content length, or a compressed payload
return None
elif self._body is not None:
return len(self._body)
else:
return 0
@content_length.setter
def content_length(self, value: Optional[int]) -> None:
raise RuntimeError("Content length is set automatically")
async def write_eof(self, data: bytes=b'') -> None:
if self._eof_sent:
return
if self._compressed_body is None:
body = self._body # type: Optional[Union[bytes, Payload]]
else:
body = self._compressed_body
assert not data, "data arg is not supported, got {!r}".format(data)
assert self._req is not None
assert self._payload_writer is not None
if body is not None:
if (self._req._method == hdrs.METH_HEAD or
self._status in [204, 304]):
await super().write_eof()
elif self._body_payload:
payload = cast(Payload, body)
await payload.write(self._payload_writer)
await super().write_eof()
else:
await super().write_eof(cast(bytes, body))
else:
await super().write_eof()
async def _start(self, request: 'BaseRequest') -> AbstractStreamWriter:
if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
if not self._body_payload:
if self._body is not None:
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
else:
self._headers[hdrs.CONTENT_LENGTH] = '0'
return await super()._start(request)
def _compress_body(self, zlib_mode: int) -> None:
compressobj = zlib.compressobj(wbits=zlib_mode)
body_in = self._body
assert body_in is not None
self._compressed_body = \
compressobj.compress(body_in) + compressobj.flush()
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._body_payload or self._chunked:
return await super()._do_start_compression(coding)
if coding != ContentCoding.identity:
# Instead of using _payload_writer.enable_compression,
# compress the whole body
zlib_mode = (16 + zlib.MAX_WBITS
if coding == ContentCoding.gzip else -zlib.MAX_WBITS)
body_in = self._body
assert body_in is not None
if self._zlib_executor_size is not None and \
len(body_in) > self._zlib_executor_size:
await asyncio.get_event_loop().run_in_executor(
self._zlib_executor, self._compress_body, zlib_mode)
else:
self._compress_body(zlib_mode)
body_out = self._compressed_body
assert body_out is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
def json_response(data: Any=sentinel, *,
text: str=None,
body: bytes=None,
status: int=200,
reason: Optional[str]=None,
headers: LooseHeaders=None,
content_type: str='application/json',
dumps: JSONEncoder=json.dumps) -> Response:
if data is not sentinel:
if text or body:
raise ValueError(
"only one of data, text, or body should be specified"
)
else:
text = dumps(data)
return Response(text=text, body=body, status=status, reason=reason,
headers=headers, content_type=content_type)
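# Illustrative usage sketch (not part of the original module): a request handler that
# returns a JSON body via json_response; the payload keys are assumptions.
async def _example_json_handler(request):
    return json_response({'status': 'ok'})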
| gnmiller/craig-bot | craig-bot/lib/python3.6/site-packages/aiohttp/web_response.py | Python | mit | 25,511 |
import pickle
import boto3
from botocore.exceptions import ClientError
from smsurvey.core.model.question import Question
from smsurvey.core.model.question import QuestionOperationException
from smsurvey import config
class QuestionService:
def __init__(self, cache_name=config.question_backend_name, local=config.local):
if local:
self.dynamo = boto3.client('dynamodb', region_name='us-west-2', endpoint_url=config.dynamo_url_local)
else:
self.dynamo = boto3.client('dynamodb', region_name='us-east-1')
self.cache_name = cache_name
def insert(self, protocol_id, question_number, question, safe=True):
if not issubclass(type(question), Question):
raise QuestionOperationException("Object is not a survey question")
if safe:
if self.get(protocol_id, question_number) is not None:
raise QuestionOperationException("Question with this ID already exists in cache")
dumped = pickle.dumps(question)
self.dynamo.put_item(
TableName=self.cache_name,
Item={
'question_number': {
'S': str(question_number)
},
'protocol_id': {
'S': str(protocol_id)
},
'question': {
'B': dumped
}
}
)
def get(self, protocol_id, question_number):
try:
response = self.dynamo.get_item(
TableName=self.cache_name,
Key={
'question_number': {'S': str(question_number)},
'protocol_id': {'S': str(protocol_id)}
}
)
except ClientError as e:
print(e.response['Error']['Message'])
raise QuestionOperationException("Error occurred trying to get item")
else:
if 'Item' in response:
return pickle.loads(response['Item']['question']['B'])
else:
return None
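# Illustrative usage sketch (not part of the original module): 'protocol-1' and the
# question argument are assumptions; the question must be a Question subclass instance
# for insert() to accept it.
def _example_round_trip(question):
    service = QuestionService()
    service.insert('protocol-1', 1, question)
    return service.get('protocol-1', 1)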
| nyu-mhealth/project-smsurvey | main/smsurvey/core/services/question_service.py | Python | gpl-3.0 | 2,074 |
import math
pt = 2
gap = 10
f = CurrentFont()
baseline = 0
descender = f.info.descender
xHeight = f.info.xHeight
capHeight = f.info.capHeight
ascender = f.info.ascender
angle = f.info.italicAngle
metrics = [baseline, descender, xHeight, capHeight, ascender]
g = f.newGlyph("fontmetrics")
p = g.getPen()
g.width = w = 500
if not angle:
angle = 0
a = math.radians(angle)
# robofont negative angle
a = -a
for m in metrics:
offset = math.tan(a) * m
p.moveTo((gap+offset, m))
p.lineTo((gap+offset, m+pt))
p.lineTo((w+offset, m+pt))
p.lineTo((w+offset, m))
    p.closePath()
| asaumierdemers/cabRoboFontScripts | Sketching/fontMetricsGlyph.py | Python | mit | 612 |
#!/usr/bin/env python
# Ingen Python Interface
# Copyright 2012-2015 David Robillard <http://drobilla.net>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import os
import rdflib
import re
import socket
import sys
try:
import StringIO.StringIO as StringIO
except ImportError:
from io import StringIO as StringIO
class NS:
atom = rdflib.Namespace('http://lv2plug.in/ns/ext/atom#')
ingen = rdflib.Namespace('http://drobilla.net/ns/ingen#')
ingerr = rdflib.Namespace('http://drobilla.net/ns/ingen/errors#')
lv2 = rdflib.Namespace('http://lv2plug.in/ns/lv2core#')
patch = rdflib.Namespace('http://lv2plug.in/ns/ext/patch#')
rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
rsz = rdflib.Namespace('http://lv2plug.in/ns/ext/resize-port#')
xsd = rdflib.Namespace('http://www.w3.org/2001/XMLSchema#')
class Interface:
'The core Ingen interface'
def put(self, subject, body):
pass
def patch(self, subject, remove, add):
pass
def get(self, subject):
pass
def set(self, subject, key, value):
pass
def connect(self, tail, head):
pass
def disconnect(self, tail, head):
pass
def delete(self, subject):
pass
class Error(Exception):
def __init__(self, msg, cause):
Exception.__init__(self, '%s; cause: %s' % (msg, cause))
def lv2_path():
path = os.getenv('LV2_PATH')
if path:
return path
elif sys.platform == 'darwin':
return os.pathsep.join(['~/Library/Audio/Plug-Ins/LV2',
'~/.lv2',
'/usr/local/lib/lv2',
'/usr/lib/lv2',
'/Library/Audio/Plug-Ins/LV2'])
elif sys.platform == 'haiku':
return os.pathsep.join(['~/.lv2',
'/boot/common/add-ons/lv2'])
elif sys.platform == 'win32':
return os.pathsep.join([
os.path.join(os.getenv('APPDATA'), 'LV2'),
os.path.join(os.getenv('COMMONPROGRAMFILES'), 'LV2')])
else:
return os.pathsep.join(['~/.lv2',
'/usr/lib/lv2',
'/usr/local/lib/lv2'])
def ingen_bundle_path():
for d in lv2_path().split(os.pathsep):
bundle = os.path.abspath(os.path.join(d, 'ingen.lv2'))
if os.path.exists(bundle):
return bundle
return None
class Remote(Interface):
def __init__(self, uri='unix:///tmp/ingen.sock'):
self.msg_id = 1
self.server_base = uri + '/'
self.model = rdflib.Graph()
self.ns_manager = rdflib.namespace.NamespaceManager(self.model)
self.ns_manager.bind('server', self.server_base)
for (k, v) in NS.__dict__.items():
self.ns_manager.bind(k, v)
if uri.startswith('unix://'):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.connect(uri[len('unix://'):])
elif uri.startswith('tcp://'):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
parsed = re.split('[:/]', uri[len('tcp://'):])
addr = (parsed[0], int(parsed[1]))
self.sock.connect(addr)
else:
raise Exception('Unsupported server URI `%s' % uri)
# Parse error description from Ingen bundle for pretty printing
bundle = ingen_bundle_path()
if bundle:
self.model.parse(os.path.join(bundle, 'errors.ttl'), format='n3')
def __del__(self):
self.sock.close()
def _get_prefixes_string(self):
s = ''
for k, v in self.ns_manager.namespaces():
s += '@prefix %s: <%s> .\n' % (k, v)
return s
def msgencode(self, msg):
if sys.version_info[0] == 3:
return bytes(msg, 'utf-8')
else:
return msg
def update_model(self, update):
for i in update.triples([None, NS.rdf.type, NS.patch.Put]):
put = i[0]
subject = update.value(put, NS.patch.subject, None)
body = update.value(put, NS.patch.body, None)
desc = {}
for i in update.triples([body, None, None]):
self.model.add([subject, i[1], i[2]])
return update
def uri_to_path(self, uri):
path = uri
if uri.startswith(self.server_base):
return uri[len(self.server_base)-1:]
return uri
def recv(self):
'Read from socket until a NULL terminator is received'
msg = u''
while True:
c = self.sock.recv(1, 0).decode('utf-8')
if not c or ord(c[0]) == 0: # End of transmission
break
else:
msg += c[0]
return msg
def blank_closure(self, graph, node):
def blank_walk(node, g):
for i in g.triples([node, None, None]):
if type(i[2]) == rdflib.BNode and i[2] != node:
yield i[2]
blank_walk(i[2], g)
closure = [node]
for b in graph.transitiveClosure(blank_walk, node):
closure += [b]
return closure
def raise_error(self, code, cause):
klass = self.model.value(None, NS.ingerr.errorCode, rdflib.Literal(code))
if not klass:
raise Error('error %d' % code, cause)
fmt = self.model.value(klass, NS.ingerr.formatString, None)
if not fmt:
raise Error('%s' % klass, cause)
raise Error(fmt, cause)
def send(self, msg):
# Send message to server
payload = msg
if sys.version_info[0] == 3:
payload = bytes(msg, 'utf-8')
self.sock.send(self.msgencode(msg))
# Receive response and parse into a model
response_str = self._get_prefixes_string() + self.recv()
response_model = rdflib.Graph(namespace_manager=self.ns_manager)
# Because rdflib has embarrassingly broken base URI resolution that
# just drops path components from the base URI entirely (seriously),
# unfortunate the real server base URI can not be used here. Use
# <ingen:/> instead to at least avoid complete nonsense
response_model.parse(StringIO(response_str), 'ingen:/', format='n3')
# Add new prefixes to prepend to future responses because rdflib sucks
for line in response_str.split('\n'):
if line.startswith('@prefix'):
match = re.search('@prefix ([^:]*): <(.*)> *\.', line)
if match:
name = match.group(1)
uri = match.group(2)
self.ns_manager.bind(match.group(1), match.group(2))
# Handle response (though there should be only one)
blanks = []
response_desc = []
for i in response_model.triples([None, NS.rdf.type, NS.patch.Response]):
response = i[0]
subject = response_model.value(response, NS.patch.subject, None)
body = response_model.value(response, NS.patch.body, None)
response_desc += [i]
blanks += [response]
if body != 0:
self.raise_error(int(body), msg) # Raise exception on server error
# Find the blank node closure of all responses
blank_closure = []
for b in blanks:
blank_closure += self.blank_closure(response_model, b)
# Remove response descriptions from model
for b in blank_closure:
for t in response_model.triples([b, None, None]):
response_model.remove(t)
# Remove triples describing responses from response model
for i in response_desc:
response_model.remove(i)
# Update model with remaining information, e.g. patch:Put updates
return self.update_model(response_model)
def get(self, subject):
return self.send('''
[]
a patch:Get ;
patch:subject <%s> .
''' % subject)
def put(self, subject, body):
return self.send('''
[]
a patch:Put ;
patch:subject <%s> ;
patch:body [
%s
] .
''' % (subject, body))
def patch(self, subject, remove, add):
return self.send('''
[]
a patch:Patch ;
patch:subject <%s> ;
patch:remove [
%s
] ;
patch:add [
%s
] .
''' % (subject, remove, add))
def set(self, subject, key, value):
return self.send('''
[]
a patch:Set ;
patch:subject <%s> ;
patch:property <%s> ;
patch:value %s .
''' % (subject, key, value))
def connect(self, tail, head):
return self.send('''
[]
a patch:Put ;
patch:subject <%s> ;
patch:body [
a ingen:Arc ;
ingen:tail <%s> ;
ingen:head <%s> ;
] .
''' % (os.path.commonprefix([tail, head]), tail, head))
def disconnect(self, tail, head):
return self.send('''
[]
a patch:Delete ;
patch:body [
a ingen:Arc ;
ingen:tail <%s> ;
ingen:head <%s> ;
] .
''' % (tail, head))
def delete(self, subject):
return self.send('''
[]
a patch:Delete ;
patch:subject <%s> .
''' % subject)
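# Illustrative usage sketch (not part of the original module): the socket URI and the
# '/graph' subject path are assumptions; adjust them to match the running Ingen server.
def _example_query_graph():
    remote = Remote('unix:///tmp/ingen.sock')
    # get() returns an rdflib.Graph holding the server's description of the subject
    return remote.get('/graph')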
| ventosus/ingen | scripts/ingen.py | Python | agpl-3.0 | 9,829 |
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import Gaffer
import GafferTest
import GafferImage
import GafferImageTest
class ImagePlugTest( GafferImageTest.ImageTestCase ) :
def testTileOrigin( self ) :
ts = GafferImage.ImagePlug.tileSize()
testCases = [
( IECore.V2i( ts-1, ts-1 ), IECore.V2i( 0, 0 ) ),
( IECore.V2i( ts, ts-1 ), IECore.V2i( ts, 0 ) ),
( IECore.V2i( ts, ts ), IECore.V2i( ts, ts ) ),
( IECore.V2i( ts*3-1, ts+5 ), IECore.V2i( ts*2, ts ) ),
( IECore.V2i( ts*3, ts-5 ), IECore.V2i( ts*3, 0 ) ),
( IECore.V2i( -ts+ts/2, 0 ), IECore.V2i( -ts, 0 ) ),
( IECore.V2i( ts*5+ts/3, -ts*4 ), IECore.V2i( ts*5, -ts*4 ) ),
( IECore.V2i( -ts+1, -ts-1 ), IECore.V2i( -ts, -ts*2 ) )
]
for input, expectedResult in testCases :
self.assertEqual(
GafferImage.ImagePlug.tileOrigin( input ),
expectedResult
)
def testDefaultChannelNamesMethod( self ) :
channelNames = GafferImage.ImagePlug()['channelNames'].defaultValue()
self.assertTrue( 'R' in channelNames )
self.assertTrue( 'G' in channelNames )
self.assertTrue( 'B' in channelNames )
def testCreateCounterpart( self ) :
p = GafferImage.ImagePlug()
p2 = p.createCounterpart( "a", Gaffer.Plug.Direction.Out )
self.assertEqual( p2.getName(), "a" )
self.assertEqual( p2.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( p2.getFlags(), p.getFlags() )
def testDynamicSerialisation( self ) :
s = Gaffer.ScriptNode()
s["n"] = Gaffer.Node()
s["n"]["p"] = GafferImage.ImagePlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
ss = s.serialise()
s = Gaffer.ScriptNode()
s.execute( ss )
self.assertTrue( isinstance( s["n"]["p"], GafferImage.ImagePlug ) )
self.assertEqual( s["n"]["p"].getFlags(), Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
def testBoxPromotion( self ) :
b = Gaffer.Box()
b["n"] = GafferImage.Grade()
self.assertTrue( b.canPromotePlug( b["n"]["in"] ) )
self.assertTrue( b.canPromotePlug( b["n"]["out"] ) )
i = b.promotePlug( b["n"]["in"] )
o = b.promotePlug( b["n"]["out"] )
self.assertEqual( b["n"]["in"].getInput(), i )
self.assertEqual( o.getInput(), b["n"]["out"] )
self.assertTrue( b.plugIsPromoted( b["n"]["in"] ) )
self.assertTrue( b.plugIsPromoted( b["n"]["out"] ) )
def testTypeNamePrefixes( self ) :
self.assertTypeNamesArePrefixed( GafferImage )
self.assertTypeNamesArePrefixed( GafferImageTest )
def testDefaultNames( self ) :
self.assertDefaultNamesAreCorrect( GafferImage )
self.assertDefaultNamesAreCorrect( GafferImageTest )
def testImageHash( self ) :
r = GafferImage.ImageReader()
r['fileName'].setValue( os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checker.exr" ) )
h = r['out'].imageHash()
for i in range( 20 ) :
self.assertEqual( h, r['out'].imageHash() )
r['refreshCount'].setValue( 2 )
self.assertNotEqual( h, r['out'].imageHash() )
def testDefaultFormatForImage( self ) :
constant = GafferImage.Constant()
with Gaffer.Context() as c :
GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 100, 200 ) )
self.assertEqual( constant["out"].image().displayWindow, IECore.Box2i( IECore.V2i( 0 ), IECore.V2i( 99, 199 ) ) )
GafferImage.FormatPlug.setDefaultFormat( c, GafferImage.Format( 200, 300 ) )
self.assertEqual( constant["out"].image().displayWindow, IECore.Box2i( IECore.V2i( 0 ), IECore.V2i( 199, 299 ) ) )
if __name__ == "__main__":
unittest.main()
| chippey/gaffer | python/GafferImageTest/ImagePlugTest.py | Python | bsd-3-clause | 5,340 |
# Wall
# Python forward compatibility
from __future__ import (division, absolute_import, print_function,
unicode_literals)
from wall import Brick as _Brick, randstr
import mpdclient2
# TODO: port to new brick architecture
class Brick(_Brick):
id = 'mpc'
maintainer = 'Thomas Karmann <thomas AT krmnn.de>'
js_module = 'wall.mpc'
post_type = 'MpcPost'
def post_new(self, type, **args):
m = mpdclient2.connect()
if not m:
return MpcPost(randstr(), "offline", m.currentsong())
return MpcPost(randstr(), "online", m.currentsong())
class MpcPost(object):
def __init__(self, id, status, currentsong):
self.id = id
self.status = status
self.currentsong = currentsong
self.__type__ = type(self).__name__
| sftech2013/liveaffiche | wall/bricks/mpc/__init__.py | Python | gpl-3.0 | 809 |
import logging
import django
django.setup()
from framework.celery_tasks import app as celery_app
from scripts.analytics.base import BaseAnalyticsHarness
from scripts.analytics.addon_snapshot import AddonSnapshot
from scripts.utils import add_file_logger
logger = logging.getLogger('scripts.analytics')
class SnapshotHarness(BaseAnalyticsHarness):
@property
def analytics_classes(self):
return [AddonSnapshot]
@celery_app.task(name='scripts.analytics.run_keen_snapshots')
def run_main():
add_file_logger(logger, __file__)
SnapshotHarness().main(command_line=False)
if __name__ == '__main__':
run_main()
| pattisdr/osf.io | scripts/analytics/run_keen_snapshots.py | Python | apache-2.0 | 637 |
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='signxml',
version="2.8.0",
url='https://github.com/kislyuk/signxml',
license='Apache Software License',
author='Andrey Kislyuk',
author_email='[email protected]',
description='Python XML Signature library',
long_description=open('README.rst').read(),
install_requires=[
'lxml >= 4.2.1, < 5',
'eight >= 0.4.2, < 2',
'cryptography >= 2.1.4, < 3',
'pyOpenSSL >= 17.5.0, < 20',
'certifi >= 2018.1.18'
],
extras_require={
':python_version == "2.7"': [
'enum34 >= 1.1.6, < 2',
'ipaddress >= 1.0.17, < 2'
]
},
packages=find_packages(exclude=['test']),
platforms=['MacOS X', 'Posix'],
package_data={'signxml': ['schemas/*.xsd']},
zip_safe=False,
include_package_data=True,
test_suite='test',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| kislyuk/signxml | setup.py | Python | apache-2.0 | 1,491 |
from frontend.models import Dataset, Silo, UserProfile
from django.contrib import admin
admin.site.register(Dataset)
admin.site.register(Silo)
admin.site.register(UserProfile)
| benosteen/django-databank | src/frontend/admin.py | Python | mit | 177 |
from mahjong.hand_calculating.hand import HandCalculator
from mahjong.meld import Meld
from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules
from mahjong.shanten import Shanten
from mahjong.tile import TilesConverter
calculator = HandCalculator()
# useful helper
def print_hand_result(hand_result):
print(hand_result.han, hand_result.fu)
print(hand_result.cost['main'])
print(hand_result.yaku)
for fu_item in hand_result.fu_details:
print(fu_item)
print('')
####################################################################
# Tanyao hand by ron #
####################################################################
# we had to use all 14 tiles in that array
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]
result = calculator.estimate_hand_value(tiles, win_tile)
print_hand_result(result)
####################################################################
# Tanyao hand by tsumo #
####################################################################
result = calculator.estimate_hand_value(tiles, win_tile, config=HandConfig(is_tsumo=True))
print_hand_result(result)
####################################################################
# Add open set to hand #
####################################################################
melds = [Meld(meld_type=Meld.PON, tiles=TilesConverter.string_to_136_array(man='444'))]
result = calculator.estimate_hand_value(tiles, win_tile, melds=melds, config=HandConfig(options=OptionalRules(has_open_tanyao=True)))
print_hand_result(result)
####################################################################
# Shanten calculation #
####################################################################
shanten = Shanten()
tiles = TilesConverter.string_to_34_array(man='13569', pin='123459', sou='443')
result = shanten.calculate_shanten(tiles)
print(result)
####################################################################
# Kazoe as a sanbaiman #
####################################################################
tiles = TilesConverter.string_to_136_array(man='22244466677788')
win_tile = TilesConverter.string_to_136_array(man='7')[0]
melds = [
Meld(Meld.KAN, TilesConverter.string_to_136_array(man='2222'), False)
]
dora_indicators = [
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
TilesConverter.string_to_136_array(man='1')[0],
]
config = HandConfig(is_riichi=True, options=OptionalRules(kazoe=HandConfig.KAZOE_SANBAIMAN))
result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)
print_hand_result(result)
####################################################################
# Change the cost of yaku #
####################################################################
config = HandConfig(is_renhou=True)
# renhou as an yakuman - old style
config.yaku.renhou.han_closed = 13
tiles = TilesConverter.string_to_136_array(man='22444', pin='333567', sou='444')
win_tile = TilesConverter.string_to_136_array(sou='4')[0]
result = calculator.estimate_hand_value(tiles, win_tile, config=config)
print_hand_result(result)
| MahjongRepository/mahjong | doc/examples.py | Python | mit | 3,558 |
"""
Forms and validation code for user registration.
"""
from django.contrib.auth.models import User
from django import forms
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
from registration import fields
# I put this on all required fields, because it's easier to pick up
# on them with CSS or JavaScript if they have a class of "required"
# in the HTML. Your mileage may vary. If/when Django ticket #3515
# lands in trunk, this will no longer be necessary.
attrs_dict = { 'class': 'required' }
class RegistrationForm(forms.Form):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should either preserve the base ``save()`` or implement
a ``save()`` method which returns a ``User``.
"""
username = forms.RegexField(regex=r'^\w+$',
max_length=30,
widget=forms.TextInput(attrs=attrs_dict),
label=_(u'username'))
email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
maxlength=75)),
label=_(u'email address'))
password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password'))
password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password (again)'))
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
in use.
"""
user = User.get_by_key_name("key_"+self.cleaned_data['username'].lower())
if user:
raise forms.ValidationError(_(u'This username is already taken. Please choose another.'))
return self.cleaned_data['username']
def clean(self):
"""
Verifiy that the values entered into the two password fields
match. Note that an error here will end up in
``non_field_errors()`` because it doesn't apply to a single
field.
"""
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
raise forms.ValidationError(_(u'You must type the same password each time'))
return self.cleaned_data
def save(self, domain_override=""):
"""
Create the new ``User`` and ``RegistrationProfile``, and
returns the ``User`` (by calling
``RegistrationProfile.objects.create_inactive_user()``).
"""
new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],
password=self.cleaned_data['password1'],
email=self.cleaned_data['email'],
domain_override=domain_override,
)
return new_user
class RegistrationFormTermsOfService(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which adds a required checkbox
for agreeing to a site's Terms of Service.
"""
tos = forms.BooleanField(widget=forms.CheckboxInput(attrs=attrs_dict),
label=_(u'I have read and agree to the Terms of Service'),
error_messages={ 'required': u"You must agree to the terms to register" })
class RegistrationFormUniqueEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which enforces uniqueness of
email addresses.
"""
def clean_email(self):
"""
Validate that the supplied email address is unique for the
site.
"""
email = self.cleaned_data['email'].lower()
if User.all().filter('email =', email).count(1):
raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.'))
return email
class RegistrationFormUniqueEmailWithCaptcha(RegistrationFormUniqueEmail):
"""
Subclass that also has a Recaptcha field to prove user is human.
"""
recaptcha = fields.ReCaptchaField(label=_(u"Prove you're a human"))
class RegistrationFormNoFreeEmail(RegistrationForm):
"""
Subclass of ``RegistrationForm`` which disallows registration with
email addresses from popular free webmail services; moderately
useful for preventing automated spam registrations.
To change the list of banned domains, subclass this form and
override the attribute ``bad_domains``.
"""
bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
'googlemail.com', 'hotmail.com', 'hushmail.com',
'msn.com', 'mail.ru', 'mailinator.com', 'live.com']
def clean_email(self):
"""
Check the supplied email address against a list of known free
webmail domains.
"""
email_domain = self.cleaned_data['email'].split('@')[1]
if email_domain in self.bad_domains:
raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.'))
return self.cleaned_data['email']
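# --- Illustrative subclass sketch (not part of the original module) ---
# The RegistrationFormNoFreeEmail docstring says to override ``bad_domains``
# in a subclass to change the banned list; the class name and domains below
# are assumptions for illustration.
class ExampleNoCorporateEmail(RegistrationFormNoFreeEmail):
    bad_domains = ['example.com', 'example.org']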
| avastjohn/maventy_new | registration/forms.py | Python | bsd-3-clause | 5,755 |
'''
New Integration Test for vip qos.
@author: Legion
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
qos = test_stub.VIPQOS()
def test():
qos.create_vm('l3VlanNetworkName1')
qos.create_eip()
test_obj_dict.add_vm(qos.vm)
test_obj_dict.add_vip(qos.vip)
port = test_stub.gen_random_port()
qos.set_vip_qos(1, 1, port, port)
qos.del_vip_qos()
port = test_stub.gen_random_port()
qos.set_vip_qos(21, 21, port, port)
qos.del_vip_qos()
port = test_stub.gen_random_port()
qos.set_vip_qos(21, 25, port, port)
qos.check_outbound_bandwidth()
qos.check_inbound_bandwidth()
qos.vip.delete()
test_obj_dict.rm_vm(qos.vm)
test_util.test_pass('VRouter Network VIP Port QoS Multi Creation Deletion Test Success')
def env_recover():
if qos.vm:
qos.vm.destroy()
if qos.vip:
qos.vip.delete()
#Will be called only if exception happens in test().
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
| zstackorg/zstack-woodpecker | integrationtest/vm/virtualrouter/vip_qos/test_vrouter_vip_port_qos_multi_crt.py | Python | apache-2.0 | 1,199 |
from django.contrib import admin
from Sessions.models import SessionDetails
admin.site.register(SessionDetails)
| sachinkum/Bal-Aveksha | WebServer/Sessions/admin.py | Python | gpl-3.0 | 113 |
#! /usr/bin/env python
#
# Copyright (c) 2011 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import re
import subprocess
from sauron import logger
from sauron.metrics import Metric, MetricException
class ShellMetric(Metric):
def __init__(self, name, serializer, cmd, units, **kwargs):
Metric.__init__(self, name, serializer, **kwargs)
        self.reconfig(name, serializer, cmd, units)
def reconfig(self, name, serializer, cmd, units, **kwargs):
Metric.reconfig(self, name, serializer, **kwargs)
self.cmd = cmd
self.units = units
def values(self):
try:
res = subprocess.Popen(self.cmd, shell=True, stdout=subprocess.PIPE).stdout.read().strip()
return {'results' : { self.name : (res, self.units) } }
except ValueError:
            raise MetricException('Invalid call to Popen for %s' % self.cmd)
except OSError as e:
raise MetricException(e)
if __name__ == '__main__':
    # Smoke test: positional arguments follow __init__(name, serializer, cmd, units);
    # None stands in for the serializer here (assumed acceptable for this standalone demo).
    m = ShellMetric('testing', None, 'ls -l | wc -l', 'Count')
    print(repr(m.values()))
| johnny-die-tulpe/sauron | sauron/metrics/ShellMetric.py | Python | mit | 2,117 |
import random
import state
import sequence as seq
import alignment
# NOTE: This function can't be tested, it is completely random :(
def sample(hmm, observations):
"""
    Samples the given HMM a finite number of times (observations) and returns an Alignment holding the state path and the emission sequence.
"""
random.seed() # force reseeding
state_path = seq.Sequence("State path", "")
emission_sequence = seq.Sequence("Sequence", "")
current_state = hmm.begin_state()
for i in range(observations):
current_state = current_state.sample_transition()
if current_state.is_end():
break
state_path.append(current_state.short_name)
emission_sequence.append(current_state.sample_emission())
return alignment.Alignment(emission_sequence, state_path)
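# --- Illustrative usage sketch (not part of the original module) ---
# Assumes an ``hmm`` object built elsewhere that exposes begin_state() and the
# per-state sampling methods used by sample() above.
def _example_sample(hmm, observations=100):
    return sample(hmm, observations)  # Alignment of emissions and state path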
| undeadpixel/mallet | mallet/sample.py | Python | mit | 810 |
"""
U.S. Geological Survey Earthquake Hazards Program Feed platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/geo_location/usgs_earthquakes_feed/
"""
from datetime import timedelta
import logging
from typing import Optional
import voluptuous as vol
from homeassistant.components.geo_location import (
PLATFORM_SCHEMA, GeoLocationEvent)
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_RADIUS, CONF_SCAN_INTERVAL,
EVENT_HOMEASSISTANT_START, CONF_LATITUDE, CONF_LONGITUDE)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect, dispatcher_send)
from homeassistant.helpers.event import track_time_interval
REQUIREMENTS = ['geojson_client==0.3']
_LOGGER = logging.getLogger(__name__)
ATTR_ALERT = 'alert'
ATTR_EXTERNAL_ID = 'external_id'
ATTR_MAGNITUDE = 'magnitude'
ATTR_PLACE = 'place'
ATTR_STATUS = 'status'
ATTR_TIME = 'time'
ATTR_TYPE = 'type'
ATTR_UPDATED = 'updated'
CONF_FEED_TYPE = 'feed_type'
CONF_MINIMUM_MAGNITUDE = 'minimum_magnitude'
DEFAULT_MINIMUM_MAGNITUDE = 0.0
DEFAULT_RADIUS_IN_KM = 50.0
DEFAULT_UNIT_OF_MEASUREMENT = 'km'
SCAN_INTERVAL = timedelta(minutes=5)
SIGNAL_DELETE_ENTITY = 'usgs_earthquakes_feed_delete_{}'
SIGNAL_UPDATE_ENTITY = 'usgs_earthquakes_feed_update_{}'
SOURCE = 'usgs_earthquakes_feed'
VALID_FEED_TYPES = [
'past_hour_significant_earthquakes',
'past_hour_m45_earthquakes',
'past_hour_m25_earthquakes',
'past_hour_m10_earthquakes',
'past_hour_all_earthquakes',
'past_day_significant_earthquakes',
'past_day_m45_earthquakes',
'past_day_m25_earthquakes',
'past_day_m10_earthquakes',
'past_day_all_earthquakes',
'past_week_significant_earthquakes',
'past_week_m45_earthquakes',
'past_week_m25_earthquakes',
'past_week_m10_earthquakes',
'past_week_all_earthquakes',
'past_month_significant_earthquakes',
'past_month_m45_earthquakes',
'past_month_m25_earthquakes',
'past_month_m10_earthquakes',
'past_month_all_earthquakes',
]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_FEED_TYPE): vol.In(VALID_FEED_TYPES),
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_RADIUS, default=DEFAULT_RADIUS_IN_KM): vol.Coerce(float),
vol.Optional(CONF_MINIMUM_MAGNITUDE, default=DEFAULT_MINIMUM_MAGNITUDE):
vol.All(vol.Coerce(float), vol.Range(min=0))
})
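# --- Illustrative configuration sketch (not part of the original module) ---
# A configuration.yaml entry matching PLATFORM_SCHEMA above; the concrete
# values are assumptions for illustration, kept as comments so the module
# stays import-safe.
#
# geo_location:
#   - platform: usgs_earthquakes_feed
#     feed_type: past_day_all_earthquakes
#     radius: 100
#     minimum_magnitude: 2.5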
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the USGS Earthquake Hazards Program Feed platform."""
scan_interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
feed_type = config[CONF_FEED_TYPE]
coordinates = (config.get(CONF_LATITUDE, hass.config.latitude),
config.get(CONF_LONGITUDE, hass.config.longitude))
radius_in_km = config[CONF_RADIUS]
minimum_magnitude = config[CONF_MINIMUM_MAGNITUDE]
# Initialize the entity manager.
feed = UsgsEarthquakesFeedEntityManager(
hass, add_entities, scan_interval, coordinates, feed_type,
radius_in_km, minimum_magnitude)
def start_feed_manager(event):
"""Start feed manager."""
feed.startup()
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, start_feed_manager)
class UsgsEarthquakesFeedEntityManager:
"""Feed Entity Manager for USGS Earthquake Hazards Program feed."""
def __init__(self, hass, add_entities, scan_interval, coordinates,
feed_type, radius_in_km, minimum_magnitude):
"""Initialize the Feed Entity Manager."""
from geojson_client.usgs_earthquake_hazards_program_feed \
import UsgsEarthquakeHazardsProgramFeedManager
self._hass = hass
self._feed_manager = UsgsEarthquakeHazardsProgramFeedManager(
self._generate_entity, self._update_entity, self._remove_entity,
coordinates, feed_type, filter_radius=radius_in_km,
filter_minimum_magnitude=minimum_magnitude)
self._add_entities = add_entities
self._scan_interval = scan_interval
def startup(self):
"""Start up this manager."""
self._feed_manager.update()
self._init_regular_updates()
def _init_regular_updates(self):
"""Schedule regular updates at the specified interval."""
track_time_interval(
self._hass, lambda now: self._feed_manager.update(),
self._scan_interval)
def get_entry(self, external_id):
"""Get feed entry by external id."""
return self._feed_manager.feed_entries.get(external_id)
def _generate_entity(self, external_id):
"""Generate new entity."""
new_entity = UsgsEarthquakesEvent(self, external_id)
# Add new entities to HA.
self._add_entities([new_entity], True)
def _update_entity(self, external_id):
"""Update entity."""
dispatcher_send(self._hass, SIGNAL_UPDATE_ENTITY.format(external_id))
def _remove_entity(self, external_id):
"""Remove entity."""
dispatcher_send(self._hass, SIGNAL_DELETE_ENTITY.format(external_id))
class UsgsEarthquakesEvent(GeoLocationEvent):
"""This represents an external event with USGS Earthquake data."""
def __init__(self, feed_manager, external_id):
"""Initialize entity with data from feed entry."""
self._feed_manager = feed_manager
self._external_id = external_id
self._name = None
self._distance = None
self._latitude = None
self._longitude = None
self._attribution = None
self._place = None
self._magnitude = None
self._time = None
self._updated = None
self._status = None
self._type = None
self._alert = None
self._remove_signal_delete = None
self._remove_signal_update = None
async def async_added_to_hass(self):
"""Call when entity is added to hass."""
self._remove_signal_delete = async_dispatcher_connect(
self.hass, SIGNAL_DELETE_ENTITY.format(self._external_id),
self._delete_callback)
self._remove_signal_update = async_dispatcher_connect(
self.hass, SIGNAL_UPDATE_ENTITY.format(self._external_id),
self._update_callback)
@callback
def _delete_callback(self):
"""Remove this entity."""
self._remove_signal_delete()
self._remove_signal_update()
self.hass.async_create_task(self.async_remove())
@callback
def _update_callback(self):
"""Call update method."""
self.async_schedule_update_ha_state(True)
@property
def should_poll(self):
"""No polling needed for USGS Earthquake events."""
return False
async def async_update(self):
"""Update this entity from the data held in the feed manager."""
_LOGGER.debug("Updating %s", self._external_id)
feed_entry = self._feed_manager.get_entry(self._external_id)
if feed_entry:
self._update_from_feed(feed_entry)
def _update_from_feed(self, feed_entry):
"""Update the internal state from the provided feed entry."""
self._name = feed_entry.title
self._distance = feed_entry.distance_to_home
self._latitude = feed_entry.coordinates[0]
self._longitude = feed_entry.coordinates[1]
self._attribution = feed_entry.attribution
self._place = feed_entry.place
self._magnitude = feed_entry.magnitude
self._time = feed_entry.time
self._updated = feed_entry.updated
self._status = feed_entry.status
self._type = feed_entry.type
self._alert = feed_entry.alert
@property
def source(self) -> str:
"""Return source value of this external event."""
return SOURCE
@property
def name(self) -> Optional[str]:
"""Return the name of the entity."""
return self._name
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return DEFAULT_UNIT_OF_MEASUREMENT
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = {}
for key, value in (
(ATTR_EXTERNAL_ID, self._external_id),
(ATTR_PLACE, self._place),
(ATTR_MAGNITUDE, self._magnitude),
(ATTR_TIME, self._time),
(ATTR_UPDATED, self._updated),
(ATTR_STATUS, self._status),
(ATTR_TYPE, self._type),
(ATTR_ALERT, self._alert),
(ATTR_ATTRIBUTION, self._attribution),
):
if value or isinstance(value, bool):
attributes[key] = value
return attributes
| tinloaf/home-assistant | homeassistant/components/geo_location/usgs_earthquakes_feed.py | Python | apache-2.0 | 9,359 |
'''
Copyright (c) 2015, Harsh Bhatia ([email protected])
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import sys
import numpy as np
from scipy import spatial
import logging
LOGGER = logging.getLogger(__name__)
from .utils.timer import Timer
class UnstructuredGrid(object):
'''Class to support nHHD on unstructured grids (triangular and tetrahedral)'''
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def need_volumes(self):
''' Compute volumes/areas for vertices, simplices, and corners'''
if self.pvolumes.shape != (0,):
return self.pvolumes
def tri_area(o,a,b):
return np.linalg.norm(np.cross(b-o,a-o)) / 2.0
def tet_volume(o,a,b,c):
return np.abs(np.dot(a-o,np.cross(b-o,c-o))) / 6.0
LOGGER.info('Computing point areas/volumes')
mtimer = Timer()
self.svolumes = np.zeros(self.nsimplices,)
self.pvolumes = np.zeros(self.nvertices,)
self.cvolumes = np.zeros((self.nsimplices,self.dim+1))
# triangulation
if self.dim == 2:
for sidx in range(self.nsimplices):
simp = self.simplices[sidx]
verts = self.vertices[simp]
self.svolumes[sidx] = tri_area(verts[0], verts[1], verts[2])
# edge weights
e = [ verts[2]-verts[1], verts[0]-verts[2], verts[1]-verts[0] ]
l2 = [ np.dot(e[0],e[0]), np.dot(e[1],e[1]), np.dot(e[2],e[2]) ]
ew = [ l2[0]*(l2[1]+l2[2]-l2[0]), l2[1]*(l2[2]+l2[0]-l2[1]),l2[2]*(l2[0]+l2[1]-l2[2]) ]
# corner areas
if (ew[0] <= 0):
self.cvolumes[sidx,1] = -0.25 * l2[2] * self.svolumes[sidx] / np.dot(e[0], e[2])
self.cvolumes[sidx,2] = -0.25 * l2[1] * self.svolumes[sidx] / np.dot(e[0], e[1])
self.cvolumes[sidx,0] = self.svolumes[sidx] - self.cvolumes[sidx,1] - self.cvolumes[sidx,2]
elif (ew[1] <= 0):
self.cvolumes[sidx,2] = -0.25 * l2[0] * self.svolumes[sidx] / np.dot(e[1], e[0]);
self.cvolumes[sidx,0] = -0.25 * l2[2] * self.svolumes[sidx] / np.dot(e[1], e[2]);
self.cvolumes[sidx,1] = self.svolumes[sidx] - self.cvolumes[sidx,2] - self.cvolumes[sidx,0];
elif (ew[2] <= 0):
self.cvolumes[sidx,0] = -0.25 * l2[1] * self.svolumes[sidx] / np.dot(e[2], e[1]);
self.cvolumes[sidx,1] = -0.25 * l2[0] * self.svolumes[sidx] / np.dot(e[2], e[0]);
self.cvolumes[sidx,2] = self.svolumes[sidx] - self.cvolumes[sidx,0] - self.cvolumes[sidx,1];
else:
ewscale = 0.5 * self.svolumes[sidx] / (ew[0] + ew[1] + ew[2])
for d in range(3):
self.cvolumes[sidx,d] = ewscale * (ew[(d+1)%3] + ew[(d+2)%3])
self.pvolumes[simp[0]] += self.cvolumes[sidx,0]
self.pvolumes[simp[1]] += self.cvolumes[sidx,1]
self.pvolumes[simp[2]] += self.cvolumes[sidx,2]
# tetrahedralization
        elif self.dim == 3:
raise ValueError('TODO: pvolumes for 3D')
for sidx in range(self.nsimplices):
simp = self.simplices[sidx]
verts = self.vertices[simp]
self.svolumes[sidx] = tet_volume(verts[0], verts[1], verts[2], verts[3])
for v in simp:
self.pvolumes[v] += self.svolumes[sidx] / 4.0
mtimer.end()
LOGGER.info('Computing point areas/volume took {}'.format(mtimer))
return self.pvolumes
def need_adjacentfaces(self):
'''
Find adjacent faces for each vertex
as list of lists
'''
if len(self.adjacent_faces) != 0:
return self.adjacent_faces
LOGGER.info('Computing adjacent_faces')
mtimer = Timer()
numadjacentfaces = np.zeros(self.nvertices, dtype=int)
for f in self.simplices:
for i in range(3):
numadjacentfaces[f[i]] += 1
# can be optimized further by avoiding "append"?
self.adjacent_faces = [[] for _ in range(self.nvertices)]
for fidx in range(self.nsimplices):
for i in range(3):
self.adjacent_faces[self.simplices[fidx, i]].append(fidx)
mtimer.end()
LOGGER.info('Computing adjacent_faces took {}'.format(mtimer))
return self.adjacent_faces
def need_acrossedge(self):
'''
Find adjacent faces for each face (across each edge)
as ndarray of ints: shape (nsimplex, 3)
-1 denotes a face on the boundary (no face across edge)
'''
if self.across_edge.shape != (0,0):
return self.across_edge
self.need_adjacentfaces()
LOGGER.info('Computing across_edge')
mtimer = Timer()
self.across_edge = -1 * np.ones((self.nsimplices, 3), dtype=np.int)
for fidx in range(self.nsimplices):
for i in range(3):
if self.across_edge[fidx, i] != -1:
continue
v1 = self.simplices[fidx, (i+1)%3]
v2 = self.simplices[fidx, (i+2)%3]
for other in self.adjacent_faces[v1]:
if other == fidx:
continue
if other not in self.adjacent_faces[v2]:
continue
oface = self.simplices[other]
j = np.where(oface == v1)[0]
j = (j+1)%3
if oface[(j+1)%3] != v2:
continue
self.across_edge[fidx, i] = other
self.across_edge[other, j] = fidx
mtimer.end()
LOGGER.info('Computing across_edge took {}'.format(mtimer))
return self.across_edge
def need_boundary(self):
'''
Find boundary of the triangulation
        as a collection of boundary edges, returned as an array of [face, k] pairs
'''
if self.bedges.shape != (0,0):
return self.bedges
self.need_acrossedge()
LOGGER.info('Computing the boundary')
mtimer = Timer()
# find all boundary faces and edges
bfaces = [fidx for fidx in range(self.nsimplices) if -1 in self.across_edge[fidx]]
bedges = []
for fidx in bfaces:
face = self.simplices[fidx]
nbrs = self.across_edge[fidx]
bedges.extend( [[fidx, k] for k in range(3) if nbrs[k] == -1] )
self.bedges = np.array(bedges)
LOGGER.info('Computing the boundary found {} boundary edges'.format(self.bedges.shape[0]))
mtimer.end()
LOGGER.info('Computing the boundary took {}'.format(mtimer))
return self.bedges
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def __init__(self, **kwargs):
'''
kwargs:
vertices: ndarray of shape (nverts, dim) # dim = 2,3
simplices: ndarray of shape (nfaces, dim+1)
verbose: verbosity level
'''
args = list(kwargs.keys())
if 'vertices' not in args:
raise SyntaxError("Mesh object needs vertex data")
self.vertices = kwargs['vertices']
self.dim = self.vertices.shape[1]
self.nvertices = self.vertices.shape[0]
if self.dim != 2 and self.dim != 3:
raise SyntaxError("Mesh object works for 2D and 3D only")
LOGGER.info('Initializing {}D mesh with {} vertices'.format(self.dim, self.nvertices))
mtimer = Timer()
# create simplices if needed
if 'simplices' in args:
self.Delaunay = None
self.simplices = kwargs['simplices']
LOGGER.debug('got {} simplices'.format(self.simplices.shape[0]))
else:
LOGGER.debug('creating Delaunay mesh')
self.Delaunay = spatial.Delaunay(self.vertices)
self.simplices = self.Delaunay.simplices
LOGGER.debug('created {} simplices'.format(self.simplices.shape[0]))
self.nsimplices = self.simplices.shape[0]
if self.dim != self.simplices.shape[1]-1:
raise SyntaxError("Dimension mismatch! pdim = {} and sdim = {} do not match!".format(pdim, sdim))
self.adjacent_faces = []
self.across_edge = np.empty((0,0))
self.bedges = np.empty((0,0))
self.pvolumes = np.empty(0)
self.need_volumes()
self.need_boundary()
#self.need_meshmatrices(verbose > 1)
mtimer.end()
LOGGER.info('Initializing took {}'.format(mtimer))
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def divcurl(self, vfield):
if vfield.shape != (self.nsimplices, self.dim):
LOGGER.error('vfield = {}, dim = {}, verts = {} simpls = {}'
.format(vfield.shape, self.dim, self.vertices.shape, self.simplices.shape))
raise ValueError("UnstructuredGrid requires a valid-dimensional vector field")
mtimer = Timer()
LOGGER.info('Computing divcurl')
div = np.zeros(self.nvertices)
curlw = np.zeros(self.nvertices)
# for each face
for sidx in range(self.nsimplices):
simp = self.simplices[sidx]
for k in range(3):
v = simp[k]
a = simp[ (k+1)%3 ]
b = simp[ (k+2)%3 ]
# normal and tangent vectors
tvec = self.vertices[b] - self.vertices[a] # counterclockwise
nvec = np.array([-tvec[1], tvec[0]]) # inward
dn = np.dot(nvec, vfield[sidx])
tn = np.dot(tvec, vfield[sidx])
div[v] += dn
curlw[v] += tn
# fix for boundary edges
for bedge in self.bedges:
sidx = bedge[0]
eidx = bedge[1]
a = self.simplices[sidx][(eidx+1)%3]
b = self.simplices[sidx][(eidx+2)%3]
tvec = self.vertices[b] - self.vertices[a] # counterclockwise
nvec = np.array([-tvec[1], tvec[0]]) # inward
dn = np.dot(nvec, vfield[sidx])
dt = np.dot(tvec, vfield[sidx])
div[a] += dn
div[b] += dn
            curlw[a] += dt
            curlw[b] += dt
div *= -0.5
curlw *= 0.5
mtimer.end()
LOGGER.info('Computing divcurl took {}'.format(mtimer))
return (div, curlw)
def gradient(self, sfield):
if sfield.shape[0] != self.nvertices:
LOGGER.error('sfield = {}, dim = {}, verts = {} simpls = {}'
.format(sfield.shape, self.dim, self.vertices.shape, self.simplices.shape))
raise ValueError("UnstructuredGrid requires a valid-dimensional scalar field")
mtimer = Timer()
LOGGER.info('Computing gradient')
grad = np.zeros((self.nsimplices, self.dim))
# for 2D
for sidx in range(self.nsimplices):
simp = self.simplices[sidx]
f = 0.5 / self.svolumes[sidx]
for k in range(3):
v = simp[k]
a = simp[ (k+1)%3 ]
b = simp[ (k+2)%3 ]
# normal and tangent vectors
tvec = self.vertices[b] - self.vertices[a] # counterclockwise
nvec = np.array([-tvec[1], tvec[0]]) # inward
grad[sidx] += f * sfield[v] * nvec
mtimer.end()
LOGGER.info('Computing gradient took {}'.format(mtimer))
return grad
def rotated_gradient(self, sfield):
rgrad = self.gradient(sfield)
rgrad[:,[0, 1]] = rgrad[:,[1, 0]]
rgrad[:,0] *= -1.0
return rgrad
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
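# --- Illustrative usage sketch (not part of the original module) ---
# Builds a Delaunay mesh over random 2D points and exercises the operators
# defined above; the random data is an assumption for illustration only.
def _example_divcurl():
    pts = np.random.rand(50, 2)
    grid = UnstructuredGrid(vertices=pts)      # simplices come from Delaunay
    vf = np.random.rand(grid.nsimplices, 2)    # one 2D vector per simplex
    div, curlw = grid.divcurl(vf)              # per-vertex divergence and curl
    grad = grid.gradient(div)                  # per-simplex gradient
    return div, curlw, grad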
| bhatiaharsh/naturalHHD | pynhhd-v1.1/pynhhd/unstructured.py | Python | bsd-2-clause | 13,575 |
# -*- coding: utf-8 -*-
import os
import sys
# Add the cola source directory to sys.path
abspath = os.path.abspath(os.path.realpath(__file__))
docdir = os.path.dirname(os.path.dirname(abspath))
srcdir = os.path.dirname(os.path.dirname(docdir))
extrasdir = os.path.join(srcdir, 'extras')
sys.path.insert(1, extrasdir)
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxtogithub']
templates_path = ['_templates']
source_suffix = '.rst'
source_encoding = 'utf-8'
master_doc = 'index'
project = 'git-cola'
copyright = '2007-2017, David Aguilar and contributors'
authors = 'David Aguilar and contributors'
versionfile = os.path.join(srcdir, 'cola', '_version.py')
scope = {}
with open(versionfile) as f:
exec(f.read(), scope)
# The short X.Y version.
version = scope['VERSION']
# The full version, including alpha/beta/rc tags.
release = version
exclude_trees = ['_build']
add_function_parentheses = True
pygments_style = 'default'
html_theme = 'default'
html_theme_path = ['_themes']
html_static_path = ['_static']
html_show_sourcelink = True
htmlhelp_basename = 'git-cola-doc'
man_pages = [
('git-cola', 'git-cola', 'The highly caffeinated Git GUI',
authors, '1'),
('git-dag', 'git-dag', 'The sleek and powerful Git history browser',
authors, '1'),
]
latex_documents = [
('index', 'git-cola.tex', 'git-cola Documentation',
'David Aguilar and contributors', 'manual'),
]
| antoniodemora/git-cola | share/doc/git-cola/conf.py | Python | gpl-2.0 | 1,507 |
#!/usr/bin/env python
import unittest
from textwrap import dedent, indent
from unittest_helpers import FIXTURE_DIR, load_fixture
# NOTE: This test file only works with scripts/ added to PYTHONPATH, so pylint can't find the imports
# pragma pylint: disable=import-error
from isolate_tests import extract_solidity_docs_cases, extract_yul_docs_cases
# pragma pylint: enable=import-error
CODE_BLOCK_RST_PATH = FIXTURE_DIR / 'code_block.rst'
CODE_BLOCK_RST_CONTENT = load_fixture(CODE_BLOCK_RST_PATH)
CODE_BLOCK_WITH_DIRECTIVES_RST_PATH = FIXTURE_DIR / 'code_block_with_directives.rst'
CODE_BLOCK_WITH_DIRECTIVES_RST_CONTENT = load_fixture(CODE_BLOCK_WITH_DIRECTIVES_RST_PATH)
def formatCase(text):
"""Formats code to contain only one indentation and terminate with a \n"""
return indent(dedent(text.lstrip("\n")), " ") + "\n"
class TestExtractDocsCases(unittest.TestCase):
def setUp(self):
self.maxDiff = 10000
def test_solidity_block(self):
expected_cases = [formatCase(case) for case in [
"""
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
contract C {
function foo() public view {}
}
""",
"""
contract C {}
""",
]]
self.assertEqual(extract_solidity_docs_cases(CODE_BLOCK_RST_PATH), expected_cases)
def test_solidity_block_with_directives(self):
expected_cases = [formatCase(case) for case in [
"""
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.7.0 <0.9.0;
contract C {
function foo() public view {}
}
""",
"""
contract C {}
""",
"""
contract D {}
:linenos:
""",
"""
contract E {}
""",
]]
self.assertEqual(extract_solidity_docs_cases(CODE_BLOCK_WITH_DIRECTIVES_RST_PATH), expected_cases)
def test_yul_block(self):
expected_cases = [formatCase(case) for case in [
"""
{
let x := add(1, 5)
}
""",
"""
// Yul code wrapped in object
{
{
let y := mul(3, 5)
}
}
""",
"""
// Yul code wrapped in named object
object "Test" {
{
let y := mul(6, 9)
}
}
""",
]]
self.assertEqual(extract_yul_docs_cases(CODE_BLOCK_RST_PATH), expected_cases)
def test_yul_block_with_directives(self):
expected_cases = [formatCase(case) for case in [
"""
{
let x := add(1, 5)
}
""",
"""
// Yul code wrapped in object
{
let y := mul(3, 5)
}
""",
"""
// Yul code wrapped in named object
object "Test" {
let y := mul(3, 5)
:linenos:
}
""",
]]
self.assertEqual(extract_yul_docs_cases(CODE_BLOCK_WITH_DIRECTIVES_RST_PATH), expected_cases)
| ethereum/solidity | test/scripts/test_isolate_tests.py | Python | gpl-3.0 | 3,398 |
from django.db import models
from django.template.defaultfilters import slugify
from tagging.fields import TagField
from tagging.utils import parse_tag_input
from django.contrib.auth.models import User
from django.conf import settings
import uuid,os
import datetime
# Create your models here.
def slugify_uniquely(value, model, slugfield="slug"):
"""Returns a slug on a name which is unique within a model's table
This code suffers a race condition between when a unique
slug is determined and when the object with that slug is saved.
It's also not exactly database friendly if there is a high
    likelihood of common slugs being attempted.
A good usage pattern for this code would be to add a custom save()
method to a model with a slug field along the lines of:
from django.template.defaultfilters import slugify
def save(self):
if not self.id:
# replace self.name with your prepopulate_from field
            self.slug = slugify_uniquely(self.name, self.__class__)
super(self.__class__, self).save()
Original pattern discussed at
http://www.b-list.org/weblog/2006/11/02/django-tips-auto-populated-fields
"""
suffix = 0
potential = base = slugify(value)
while True:
if suffix:
potential = "-".join([base, str(suffix)])
if not model.objects.filter(**{slugfield: potential}).count():
return potential
# we hit a conflicting slug, so bump the suffix & try again
suffix += 1
def get_file_path(instance, filename):
ext = filename.split('.')[-1]
filename = "%s.%s" % (instance.uuid, ext)
return os.path.join(settings.LOCAL_PHOTO_STORAGE_FOLDER, filename)
class TrovePhoto(models.Model):
user = models.ForeignKey(User)
full_size = models.URLField(max_length=255)
thumbnail = models.URLField(max_length=255)
trove_id = models.CharField(max_length=200)
def get_absolute_url(self):
        # TrovePhoto defines no ``url`` field; use the full-size photo URL.
        return self.full_size
class LocalPhoto(models.Model):
user = models.ForeignKey(User)
uuid = models.CharField(max_length=200)
title = models.CharField(max_length=100,null=True,blank=True)
description = models.CharField(max_length=200,null=True,blank=True)
photo = models.ImageField(upload_to=get_file_path,null=True,blank=True)
date_uploaded = models.DateTimeField()
tags = TagField()
thumbnail = models.ImageField(upload_to="thumbnails/", editable=False)
def save(self):
if not self.id:
self.date_uploaded = datetime.datetime.now()
from PIL import Image
from cStringIO import StringIO
from django.core.files.uploadedfile import SimpleUploadedFile
# Set our max thumbnail size in a tuple (max width, max height)
THUMBNAIL_SIZE = (50, 50)
# Open original photo which we want to thumbnail using PIL's Image
# object
image = Image.open(settings.MEDIA_ROOT + self.photo.name)
# Convert to RGB if necessary
# Thanks to Limodou on DjangoSnippets.org
# http://www.djangosnippets.org/snippets/20/
if image.mode not in ('L', 'RGB'):
image = image.convert('RGB')
# We use our PIL Image object to create the thumbnail, which already
        # has a thumbnail() convenience method that constrains proportions.
# Additionally, we use Image.ANTIALIAS to make the image look better.
# Without antialiasing the image pattern artifacts may result.
image.thumbnail(THUMBNAIL_SIZE, Image.ANTIALIAS)
# Save the thumbnail
temp_handle = StringIO()
image.save(temp_handle, 'png')
temp_handle.seek(0)
# Save to the thumbnail field
suf = SimpleUploadedFile(os.path.split(self.photo.name)[-1],
temp_handle.read(), content_type='image/png')
self.thumbnail.save(suf.name+'.png', suf, save=False)
# Save this photo instance
super(LocalPhoto, self).save()
def get_absolute_url(self):
return "%s%s" % (settings.LOCAL_PHOTO_URL,self.photo)
class SuperAlbumPhoto(TrovePhoto):
trove_photo = models.ForeignKey(TrovePhoto,related_name='superalbum_trove_photo')
album = models.ForeignKey('SuperAlbum')
order = models.IntegerField()
class SuperAlbum(models.Model):
user = models.ForeignKey(User)
album_name = models.CharField(max_length=100)
slug = models.SlugField(max_length=255,unique=True)
def save(self, *args, **kw):
if not self.slug:
self.slug = slugify_uniquely(self.album_name,self.__class__)
super(SuperAlbum, self).save(*args, **kw)
def get_photos(self):
photos = SuperAlbumPhoto.objects.filter(album=self).order_by('order')
return photos
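# --- Illustrative usage sketch (not part of the original module) ---
# Demonstrates the unique-slug behaviour provided by slugify_uniquely() and
# SuperAlbum.save(); the ``user`` argument and album name are assumptions.
def _example_create_album(user):
    album = SuperAlbum(user=user, album_name="Holiday 2013")
    album.save()  # slug becomes "holiday-2013", or "holiday-2013-1" on a clash
    return album.slug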
| trove/trove-superalbums | superalbums/models.py | Python | mit | 5,090 |
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Julien Veyssier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.browser import BaseBrowser, BrowserHTTPNotFound
from .pages import SongResultsPage, SonglyricsPage, ArtistResultsPage, ArtistSongsPage
__all__ = ['SeeklyricsBrowser']
class SeeklyricsBrowser(BaseBrowser):
DOMAIN = 'www.seeklyrics.com'
PROTOCOL = 'http'
ENCODING = 'iso-8859-1'
USER_AGENT = BaseBrowser.USER_AGENTS['wget']
PAGES = {
'http://www.seeklyrics.com/search.php.*t=1': SongResultsPage,
'http://www.seeklyrics.com/search.php.*t=2': ArtistResultsPage,
'http://www.seeklyrics.com/lyrics/.*html': SonglyricsPage,
'http://www.seeklyrics.com/lyrics/.*/': ArtistSongsPage,
}
def iter_lyrics(self, criteria, pattern):
if criteria == 'artist':
type = 2
else:
type = 1
self.location('http://www.seeklyrics.com/search.php?q=%s&t=%s' % (pattern, type))
assert self.is_on_page(ArtistResultsPage) or self.is_on_page(SongResultsPage)
return self.page.iter_lyrics()
def get_lyrics(self, id):
try:
self.location('http://www.seeklyrics.com/lyrics/%s.html' % id)
except BrowserHTTPNotFound:
return
if self.is_on_page(SonglyricsPage):
return self.page.get_lyrics(id)
| franek/weboob | modules/seeklyrics/browser.py | Python | agpl-3.0 | 2,001 |
import discord
import datetime
import random
import requests
import json
import os
import sys
import math
import string
import imp
from datetime import datetime
from PythonGists import PythonGists
from discord.ext import commands
from cogs.utils.checks import *
'''
TextScript by CubeBag and LyricLy
Type >help ts to view cog info.
'''
class TextScript:
def __init__(self, bot):
self.bot = bot
config = load_config()
#drag and drop the file
self.bot_prefix = config["bot_identifier"]
@commands.command(pass_context=True)
async def ts(self,ctx,*,msg):
"""TextScript
Allows for functions to be done on bits of text inside of your Discord messages.
For Appu's Selfbot.
Ever wanted to run a command that changes some text inside of your Discord message without having to run it in a separate message? The TextScript cog will allow you to do that.
Currently, there is no place to upload/download other text scripts (like the ASCII for cogs). You will have to exchange the .ts.py files directly with creators.
To write your own script, name your file something in the format of `*.ts.py` and place it inside of your `textscript/` folder.
This will make the TextScript cog recognize it as a text script. There should already be a sample TextScript file you can look at there as a template.
Your message should be formatted like this:
`>ts some text that won't be formatted {tsfile.scriptname this is text that will be formatted} more text that won't be formatted`
where > is your command prefix
`tsfile` is the `.ts.py` file that contains a text script
`scriptname` is the name of the actual script (the function)
For example, let's say you had a script that put spaces in between each letter called space in a file named utils.ts.py. Your message would look something like this:
`>ts when you want that good ol {utils.space STABILITY}`
and that message would output
"when you want that good ol S T A B I L I T Y"
Credits: LyricLy for some help with code dealing with actually reading the scripts (the hard part)
"""
def modify(s): # s will look something like: "file.script this is some text ahasdfasidfahidsh"
instructions = s.split(None,1) # instructions will be a list, being something like this: ['file.script', 'the text dgoadoasoijfasdfaiosdjf']
#this next bit was the tough part, it sorta imports the script for use.
file, function = instructions[0].split(".") # This Line (Mostly) Written by LyricLy
module = imp.load_source(file, "textscript/" + file + ".ts.py")# This Line Written by LyricLy
func = eval("module." + function) # This Line Written by LyricLy
return func(instructions[1])
await ctx.message.delete()
        # Repeatedly find the innermost {script text} group and replace it, so nested groups like {script1 {script2 more text}} are resolved inside-out
# print(msg)
new_message = msg
pos = 0
while not pos == -1: #keep doing it until you are done
# print('test to see if it is looping infinitely')
check = 0
pos = -1
for i in range(0, len(new_message)):
# print('test to see if it is stuck in for loop, i=' + str(i) + ", letter=" + new_message[i] + ", check=" + str(check))
if new_message[i] == '{' and not new_message[i-1] == "\\":
# print('if')
check = 1
pos = i
elif new_message[i] == '}' and check == 1 and not new_message[i-1] == '\\':
# print('elif')
new_message = new_message[:pos] + modify(new_message[pos+1:i]) + new_message[i+1:] # everything up to but not including { + do script to text + everything after }
check = 2
break # get caught in while loop, basically just exit for loop and start over
if check == 1:
print(' ***** Warning: Reached end of message before a closing }. If you don\'t want your { to be processed as a script, use a backslash (like \{).')
raise Exception
await ctx.message.channel.send(new_message)
def setup(bot):
bot.add_cog(TextScript(bot))
# If the textscripts folder does not exist, create it and drop in the sample scripts
if not os.path.isdir("textscript"):
try:
print('***\nIt seems like you don\'t have a textscript/ folder, so I created it for you and added the built-in scripts.\nOpen each of the files in textscript/ to see the scripts that you can use,\nand type >help ts to read info about the cog.\n***')
os.makedirs('textscript')
file = open('textscript/template.ts.py', 'wb+') # template
file.write(("""
#
# In short, all actual scripts should be a python function
# that takes in a string as the only argument, and returns
# a single string.
#
# The input is whatever is inbetween the
# curly brackets in the original message, for example,
# {template.foo Input}
# would set the only argument to 'Input'.
#
# The output (everything after return) is what everything in the curly brackets gets replaced with.
#
# TextScript is designed with simplicity for the script writer in mind, so hopefully it makes sense.
# Remember, the file should be placed in the folder textscript and have the extension .ts.py
def foo(input): # input is whatever is inbetween the curly brackets after the script name
return input # what will replace the entire bracket group
def uppercase(input): # SAMPLE SCRIPT: Turns every letter uppercase. input is the text inside of the curly brackets.
output = '' # What will get returned.
for letter in input: # for every letter in the input
output += letter.upper() # append the uppercase form of that letter to the output
return output # return output""").encode('utf-8'))
file2 = open('textscript/utils.ts.py', 'wb+') # utils
file2.write(("""
import random
combining_diacritical_marks = '̴̵̶̡̢̧̨̛̍̎̄̅̿̑̆̐͒͗͑̇̈̊͂̓̈́͊͋͌̃̂̌͐̀́̋̏̒̓̔̽̉ͣͤͥͦͧͨͩͪͫͬͭͮͯ̾͛͆̀́̚̕͘͏̸̷͜͟͢͝͞͠͡҉̖̗̘̙̜̝̞̟̠̤̥̦̩̪̫̬̭̮̯̰̱̲̳̹̺̻̼͇͈͉͍͎͓͔͕͖͙͚̣ͅ'
def space(str):
new_str = ''
for letter in str:
new_str += letter + ' '
if len(new_str) > 0:
new_str = new_str[:-1] # trim the last space off of new_str
return new_str
def altcase(str): # randomly makes some letters uppercase and some lowercase
new_str = ''
for letter in str:
if random.random() > 0.5:
new_str += letter.upper()
else:
new_str += letter.lower()
return new_str
def uni(num):
return chr(int(num, 16))
def unidec(num):
return chr(int(num))
def code(s): # for when I'm on mobile and it's too hard to input a grave accent key
return '`' + s + '`'
####### for bold, ital, and boldital:
####### https://codepoints.net/mathematical_alphanumeric_symbols
def bold(text):
out = ''
for letter in text:
if (ord(letter) >= 0x41 and ord(letter) <= 0x5a): # if it is a letter A-Z
out += '{utils.uni ' + str(hex(0x1d3bf + ord(letter))) + '}' # translate to unicode code point
elif (ord(letter) >= 0x61 and ord(letter) <= 0x7a): # if it is a letter a-z
out += '{utils.uni ' + str(hex(0x1d3b9 + ord(letter))) + '}' # translate to unicode code point
else:
out += letter # it isn't a letter, so just include it in the output
return out
def ital(text):
out = ''
for letter in text:
if (ord(letter) >= 0x41 and ord(letter) <= 0x5a): # if it is a letter A-Z
out += '{utils.uni ' + str(hex(0x1d3f3 + ord(letter))) + '}' # translate to unicode code point for ital
elif (ord(letter) >= 0x61 and ord(letter) <= 0x7a): # if it is a letter a-z
if not (0x1d3ed + ord(letter) == 119893): # an exception for if the unicode character is U+1d455
out += '{utils.uni ' + str(hex(0x1d3ed + ord(letter))) + '}' # translate to unicode code point
else:
out += '{utils.uni 210e}' # replace U+1d455 (doesn't exist) with U+210e (Planck Constant)
else:
out += letter # it isn't a letter, so just include it in the output
return out
def boldital(text):
out = ''
for letter in text:
if (ord(letter) >= 0x41 and ord(letter) <= 0x5a): # if it is a letter A-Z
out += '{utils.uni ' + str(hex(0x1d427 + ord(letter))) + '}' # translate to unicode code point
elif (ord(letter) >= 0x61 and ord(letter) <= 0x7a): # if it is a letter a-z
out += '{utils.uni ' + str(hex(0x1d421 + ord(letter))) + '}' # translate to unicode code point
else:
out += letter # it isn't a letter, so just include it in the output
return out
def fullwidth(text):
out = ''
for letter in text:
if (ord(letter) >= 0x21 and ord(letter) <= 0x7e): # the great thing about the fullwidth unicode block is that the characters are in the same order as the common letters in Basic Latin
out += '{utils.uni ' + str(hex(0xfee0 + ord(letter))) + '}' # translate to unicode code point
else:
out += letter # it isn't a letter, so just include it in the output
return out
def zalgo(info): # format: info should be a number followed by a space and the text to zalgo. The number is the intensity
new_text = ''
text = info.split(None,1)[1]
intensity = int(info.split(None,1)[0])
for letter in text:
new_text += letter
for i in range(0, intensity):
new_text += combining_diacritical_marks[int(random.random()*len(combining_diacritical_marks))]
return new_text""").encode('utf-8'))
except:
raise | indolentriffraff/fihndos | cogs/textscript.py | Python | gpl-3.0 | 9,642 |
import abc
from cryptography.hazmat.backends.openssl.backend import backend
from cryptography.hazmat.primitives import serialization
TOKEN_SIGNATURE_RSA = 'RS256'
TOKEN_SIGNATURE_HMAC = 'HS256'
class AccessTokenSignature(object, metaclass=abc.ABCMeta):
def __init__(self):
pass
@abc.abstractmethod
def id(self):
raise NotImplementedError()
@abc.abstractmethod
def sign_key(self):
raise NotImplementedError()
@abc.abstractmethod
def validate_key(self):
raise NotImplementedError()
class RSAAccessTokenSignature(AccessTokenSignature):
def __init__(self, private_key=None, password=None, public_key=None):
AccessTokenSignature.__init__(self)
if private_key:
with open(private_key, "rb") as f:
self.private = serialization.load_pem_private_key(
f.read(),
password=password.encode(),
backend=backend)
else:
self.private = None
with open(public_key, "rb") as f:
self.public = serialization.load_pem_public_key(
f.read(),
backend=backend)
def id(self):
return TOKEN_SIGNATURE_RSA
def sign_key(self):
return self.private
def validate_key(self):
return self.public
class HMACAccessTokenSignature(AccessTokenSignature):
def __init__(self, key=None):
AccessTokenSignature.__init__(self)
self.key = key
def id(self):
return TOKEN_SIGNATURE_HMAC
def sign_key(self):
return self.key
def validate_key(self):
return self.key
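# --- Illustrative usage sketch (not part of the original module) ---
# Shows how a caller might pick a signature backend; the key literal is an
# assumption, and a real deployment would load it from configuration.
def _example_signature():
    sig = HMACAccessTokenSignature(key="change-me")
    return sig.id(), sig.sign_key(), sig.validate_key()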
| anthill-services/anthill-common | anthill/common/sign.py | Python | mit | 1,661 |
# Copyright (c) 2014, Menno Smits
# Released subject to the New BSD License
# Please see http://en.wikipedia.org/wiki/BSD_licenses
'''
Unit tests for the FetchTokeniser and FetchParser classes
'''
from __future__ import unicode_literals
from datetime import datetime
from imapclient.fixed_offset import FixedOffset
from imapclient.response_parser import parse_response, parse_fetch_response, ParseError
from imapclient.response_types import Envelope, Address
from imapclient.test.util import unittest
#TODO: test invalid dates and times
CRLF = b'\r\n'
class TestParseResponse(unittest.TestCase):
def test_unquoted(self):
self._test(b'FOO', b'FOO')
self._test(b'F.O:-O_0;', b'F.O:-O_0;')
self._test(br'\Seen', br'\Seen')
def test_string(self):
self._test(b'"TEST"', b'TEST')
def test_int(self):
self._test(b'45', 45)
def test_nil(self):
self._test(b'NIL', None)
def test_empty_tuple(self):
self._test(b'()', ())
def test_tuple(self):
self._test(b'(123 "foo" GeE)', (123, b'foo', b'GeE'))
def test_int_and_tuple(self):
self._test(b'1 (123 "foo")', (1, (123, b'foo')), wrap=False)
def test_nested_tuple(self):
self._test(b'(123 "foo" ("more" NIL) 66)',
(123, b"foo", (b"more", None), 66))
def test_deeper_nest_tuple(self):
self._test(b'(123 "foo" ((0 1 2) "more" NIL) 66)',
(123, b"foo", ((0, 1, 2), b"more", None), 66))
def test_complex_mixed(self):
self._test(b'((FOO "PLAIN" ("CHARSET" "US-ASCII") NIL NIL "7BIT" 1152 23) '
b'("TEXT" "PLAIN" ("CHARSET" "US-ASCII" "NAME" "cc.diff") '
b'"<hi.there>" "foo" "BASE64" 4554 73) "MIXED")',
((b'FOO', b'PLAIN', (b'CHARSET', b'US-ASCII'), None, None, b'7BIT', 1152, 23),
(b'TEXT', b'PLAIN', (b'CHARSET', b'US-ASCII', b'NAME', b'cc.diff'),
b'<hi.there>', b'foo', b'BASE64', 4554, 73), b'MIXED'))
def test_envelopey(self):
self._test(b'(UID 5 ENVELOPE ("internal_date" "subject" '
b'(("name" NIL "address1" "domain1.com")) '
b'((NIL NIL "address2" "domain2.com")) '
b'(("name" NIL "address3" "domain3.com")) '
b'((NIL NIL "address4" "domain4.com")) '
b'NIL NIL "<reply-to-id>" "<msg_id>"))',
(b'UID',
5,
b'ENVELOPE',
(b'internal_date',
b'subject',
((b'name', None, b'address1', b'domain1.com'),),
((None, None, b'address2', b'domain2.com'),),
((b'name', None, b'address3', b'domain3.com'),),
((None, None, b'address4', b'domain4.com'),),
None,
None,
b'<reply-to-id>',
b'<msg_id>')))
def test_envelopey_quoted(self):
self._test(b'(UID 5 ENVELOPE ("internal_date" "subject with \\"quotes\\"" '
b'(("name" NIL "address1" "domain1.com")) '
b'((NIL NIL "address2" "domain2.com")) '
b'(("name" NIL "address3" "domain3.com")) '
b'((NIL NIL "address4" "domain4.com")) '
b'NIL NIL "<reply-to-id>" "<msg_id>"))',
(b'UID',
5,
b'ENVELOPE',
(b'internal_date',
b'subject with "quotes"',
((b'name', None, b'address1', b'domain1.com'),),
((None, None, b'address2', b'domain2.com'),),
((b'name', None, b'address3', b'domain3.com'),),
((None, None, b'address4', b'domain4.com'),),
None,
None,
b'<reply-to-id>',
b'<msg_id>')))
def test_literal(self):
literal_text = add_crlf(
b"012\n"
b"abc def XYZ\n"
)
self._test([(b'{18}', literal_text)], literal_text)
def test_literal_with_more(self):
literal_text = add_crlf(
b"012\n"
b"abc def XYZ\n"
)
response = [(b'(12 "foo" {18}', literal_text), b")"]
self._test(response, (12, b'foo', literal_text))
def test_quoted_specials(self):
self._test(br'"\"foo bar\""', b'"foo bar"')
self._test(br'"foo \"bar\""', b'foo "bar"')
self._test(br'"foo\\bar"', br'foo\bar')
def test_square_brackets(self):
self._test(b'foo[bar rrr]', b'foo[bar rrr]')
self._test(b'"foo[bar rrr]"', b'foo[bar rrr]')
self._test(b'[foo bar]def', b'[foo bar]def')
self._test(b'(foo [bar rrr])', (b'foo', b'[bar rrr]'))
self._test(b'(foo foo[bar rrr])', (b'foo', b'foo[bar rrr]'))
def test_incomplete_tuple(self):
self._test_parse_error(b'abc (1 2', 'Tuple incomplete before "\(1 2"')
def test_bad_literal(self):
self._test_parse_error([(b'{99}', b'abc')],
'Expecting literal of size 99, got 3')
def test_bad_quoting(self):
self._test_parse_error(b'"abc next', """No closing '"'""")
def _test(self, to_parse, expected, wrap=True):
if wrap:
# convenience - expected value should be wrapped in another tuple
expected = (expected,)
if not isinstance(to_parse, list):
to_parse = [to_parse]
output = parse_response(to_parse)
self.assertSequenceEqual(output, expected)
def _test_parse_error(self, to_parse, expected_msg):
if not isinstance(to_parse, list):
to_parse = [to_parse]
self.assertRaisesRegex(ParseError, expected_msg,
parse_response, to_parse)
class TestParseFetchResponse(unittest.TestCase):
def test_basic(self):
self.assertEqual(parse_fetch_response([b'4 ()']), {4: {b'SEQ': 4}})
def test_none_special_case(self):
self.assertEqual(parse_fetch_response([None]), {})
def test_bad_msgid(self):
self.assertRaises(ParseError, parse_fetch_response, [b'abc ()'])
def test_bad_data(self):
self.assertRaises(ParseError, parse_fetch_response, [b'2 WHAT'])
def test_missing_data(self):
self.assertRaises(ParseError, parse_fetch_response, [b'2'])
def test_simple_pairs(self):
self.assertEqual(parse_fetch_response([b'23 (ABC 123 StUfF "hello")']),
{23: {b'ABC': 123,
b'STUFF': b'hello',
b'SEQ': 23}})
def test_odd_pairs(self):
self.assertRaises(ParseError, parse_fetch_response, [b'(ONE)'])
self.assertRaises(ParseError, parse_fetch_response, [b'(ONE TWO THREE)'])
def test_UID(self):
self.assertEqual(parse_fetch_response([b'23 (UID 76)']),
{76: {b'SEQ': 23}})
self.assertEqual(parse_fetch_response([b'23 (uiD 76)']),
{76: {b'SEQ': 23}})
def test_not_uid_is_key(self):
self.assertEqual(parse_fetch_response([b'23 (UID 76)'], uid_is_key=False),
{23: {b'UID': 76,
b'SEQ': 23}})
def test_bad_UID(self):
self.assertRaises(ParseError, parse_fetch_response, [b'(UID X)'])
def test_FLAGS(self):
self.assertEqual(parse_fetch_response([b'23 (FLAGS (\Seen Stuff))']),
{23: {b'SEQ': 23, b'FLAGS': (br'\Seen', b'Stuff')}})
def test_multiple_messages(self):
self.assertEqual(parse_fetch_response(
[b"2 (FLAGS (Foo Bar)) ",
b"7 (FLAGS (Baz Sneeve))"]),
{
2: {b'FLAGS': (b'Foo', b'Bar'), b'SEQ': 2},
7: {b'FLAGS': (b'Baz', b'Sneeve'), b'SEQ': 7},
})
def test_same_message_appearing_multiple_times(self):
# This can occur when server sends unsolicited FETCH responses
# (e.g. RFC 4551)
self.assertEqual(parse_fetch_response(
[b"2 (FLAGS (Foo Bar)) ",
b"2 (MODSEQ 4)"]),
{2: {b'FLAGS': (b'Foo', b'Bar'), b'SEQ': 2, b'MODSEQ': 4}})
def test_literals(self):
self.assertEqual(parse_fetch_response([(b'1 (RFC822.TEXT {4}', b'body'),
(b' RFC822 {21}', b'Subject: test\r\n\r\nbody'),
b')']),
{1: {b'RFC822.TEXT': b'body',
b'RFC822': b'Subject: test\r\n\r\nbody',
b'SEQ': 1}})
def test_literals_and_keys_with_square_brackets(self):
self.assertEqual(parse_fetch_response([(b'1 (BODY[TEXT] {11}', b'Hi there.\r\n'), b')']),
{ 1: {b'BODY[TEXT]': b'Hi there.\r\n',
b'SEQ': 1}})
def test_BODY_HEADER_FIELDS(self):
header_text = b'Subject: A subject\r\nFrom: Some one <[email protected]>\r\n\r\n'
self.assertEqual(parse_fetch_response(
[(b'123 (UID 31710 BODY[HEADER.FIELDS (from subject)] {57}', header_text), b')']),
{ 31710: {b'BODY[HEADER.FIELDS (FROM SUBJECT)]': header_text,
b'SEQ': 123}})
def test_BODY(self):
self.check_BODYish_single_part(b'BODY')
self.check_BODYish_multipart(b'BODY')
self.check_BODYish_nested_multipart(b'BODY')
def test_BODYSTRUCTURE(self):
self.check_BODYish_single_part(b'BODYSTRUCTURE')
self.check_BODYish_nested_multipart(b'BODYSTRUCTURE')
def check_BODYish_single_part(self, respType):
text = b'123 (UID 317 ' + respType + b'("TEXT" "PLAIN" ("CHARSET" "us-ascii") NIL NIL "7BIT" 16 1))'
parsed = parse_fetch_response([text])
self.assertEqual(parsed, {
317: {
respType: (b'TEXT', b'PLAIN', (b'CHARSET', b'us-ascii'), None, None, b'7BIT', 16, 1),
b'SEQ': 123
}
})
self.assertFalse(parsed[317][respType].is_multipart)
def check_BODYish_multipart(self, respType):
text = b'123 (UID 269 ' + respType + b' ' \
b'(("TEXT" "HTML" ("CHARSET" "us-ascii") NIL NIL "QUOTED-PRINTABLE" 55 3)' \
b'("TEXT" "PLAIN" ("CHARSET" "us-ascii") NIL NIL "7BIT" 26 1) "MIXED"))'
parsed = parse_fetch_response([text])
self.assertEqual(parsed, {
269: {
respType: ([(b'TEXT', b'HTML', (b'CHARSET', b'us-ascii'), None, None, b'QUOTED-PRINTABLE', 55, 3),
(b'TEXT', b'PLAIN', (b'CHARSET', b'us-ascii'), None, None, b'7BIT', 26, 1)],
b'MIXED'),
b'SEQ': 123}
})
self.assertTrue(parsed[269][respType].is_multipart)
def check_BODYish_nested_multipart(self, respType):
text = b'1 (' + respType + b'(' \
b'(' \
b'("text" "html" ("charset" "utf-8") NIL NIL "7bit" 97 3 NIL NIL NIL NIL)' \
b'("text" "plain" ("charset" "utf-8") NIL NIL "7bit" 62 3 NIL NIL NIL NIL)' \
b'"alternative" ("boundary" "===============8211050864078048428==") NIL NIL NIL' \
b')' \
b'("text" "plain" ("charset" "utf-8") NIL NIL "7bit" 16 1 NIL ("attachment" ("filename" "attachment.txt")) NIL NIL) ' \
b'"mixed" ("boundary" "===============0373402508605428821==") NIL NIL NIL))'
parsed = parse_fetch_response([text])
self.assertEqual(parsed, {1: {
respType: (
[
(
[
(b'text', b'html', (b'charset', b'utf-8'), None, None, b'7bit', 97, 3, None, None, None, None),
(b'text', b'plain', (b'charset', b'utf-8'), None, None, b'7bit', 62, 3, None, None, None, None)
], b'alternative', (b'boundary', b'===============8211050864078048428=='), None, None, None
),
(b'text', b'plain', (b'charset', b'utf-8'), None, None, b'7bit', 16, 1, None, (b'attachment', (b'filename', b'attachment.txt')), None, None)
], b'mixed', (b'boundary', b'===============0373402508605428821=='), None, None, None,
),
b'SEQ': 1,
}})
self.assertTrue(parsed[1][respType].is_multipart)
self.assertTrue(parsed[1][respType][0][0].is_multipart)
self.assertFalse(parsed[1][respType][0][0][0][0].is_multipart)
def test_partial_fetch(self):
body = b'01234567890123456789'
self.assertEqual(parse_fetch_response(
[(b'123 (UID 367 BODY[]<0> {20}', body), b')']),
{ 367: {b'BODY[]<0>': body,
b'SEQ': 123}})
def test_ENVELOPE(self):
envelope_str = (b'1 (ENVELOPE ( '
b'"Sun, 24 Mar 2013 22:06:10 +0200" '
b'"subject" '
b'(("name" NIL "address1" "domain1.com")) ' # from (name and address)
b'((NIL NIL "address2" "domain2.com")) ' # sender (just address)
b'(("name" NIL "address3" "domain3.com") NIL) ' # reply to
b'NIL' # to (no address)
b'((NIL NIL "address4" "domain4.com") ' # cc
b'("person" NIL "address4b" "domain4b.com")) '
b'NIL ' # bcc
b'"<reply-to-id>" '
b'"<msg_id>"))')
output = parse_fetch_response([envelope_str], normalise_times=False)
self.assertSequenceEqual(output[1][b'ENVELOPE'],
Envelope(
datetime(2013, 3, 24, 22, 6, 10, tzinfo=FixedOffset(120)),
b"subject",
(Address(b"name", None, b"address1", b"domain1.com"),),
(Address(None, None, b"address2", b"domain2.com"),),
(Address(b"name", None, b"address3", b"domain3.com"),),
None,
(Address(None, None, b"address4", b"domain4.com"),
Address(b"person", None, b"address4b", b"domain4b.com")),
None, b"<reply-to-id>", b"<msg_id>"
)
)
def test_ENVELOPE_with_no_date(self):
envelope_str = (
b'1 (ENVELOPE ( '
b'NIL '
b'"subject" '
b'NIL '
b'NIL '
b'NIL '
b'NIL '
b'NIL '
b'NIL '
b'"<reply-to-id>" '
b'"<msg_id>"))'
)
output = parse_fetch_response([envelope_str], normalise_times=False)
self.assertSequenceEqual(output[1][b'ENVELOPE'],
Envelope(
None,
b"subject",
None,
None,
None,
None,
None,
None,
b"<reply-to-id>", b"<msg_id>"
)
)
def test_ENVELOPE_with_empty_addresses(self):
envelope_str = (b'1 (ENVELOPE ( '
b'NIL '
b'"subject" '
b'(("name" NIL "address1" "domain1.com") NIL) '
b'(NIL (NIL NIL "address2" "domain2.com")) '
b'(("name" NIL "address3" "domain3.com") NIL ("name" NIL "address3b" "domain3b.com")) '
b'NIL'
b'((NIL NIL "address4" "domain4.com") '
b'("person" NIL "address4b" "domain4b.com")) '
b'NIL "<reply-to-id>" "<msg_id>"))')
output = parse_fetch_response([envelope_str], normalise_times=False)
self.assertSequenceEqual(output[1][b'ENVELOPE'],
Envelope(
None,
b"subject",
(Address(b"name", None, b"address1", b"domain1.com"),),
(Address(None, None, b"address2", b"domain2.com"),),
(Address(b"name", None, b"address3", b"domain3.com"),
Address(b"name", None, b"address3b", b"domain3b.com")),
None,
(Address(None, None, b"address4", b"domain4.com"),
Address(b"person", None, b"address4b", b"domain4b.com")),
None, b"<reply-to-id>", b"<msg_id>"
)
)
def test_INTERNALDATE(self):
def check(date_str, expected_dt):
output = parse_fetch_response([b'3 (INTERNALDATE "' + date_str + b'")'], normalise_times=False)
actual_dt = output[3][b'INTERNALDATE']
self.assertEqual(actual_dt, expected_dt)
check(b' 9-Feb-2007 17:08:08 -0430',
datetime(2007, 2, 9, 17, 8, 8, 0, FixedOffset(-4*60 - 30)))
check(b'12-Feb-2007 17:08:08 +0200',
datetime(2007, 2, 12, 17, 8, 8, 0, FixedOffset(2*60)))
check(b' 9-Dec-2007 17:08:08 +0000',
datetime(2007, 12, 9, 17, 8, 8, 0, FixedOffset(0)))
def test_INTERNALDATE_normalised(self):
def check(date_str, expected_dt):
output = parse_fetch_response([b'3 (INTERNALDATE "' + date_str + b'")'])
actual_dt = output[3][b'INTERNALDATE']
self.assertTrue(actual_dt.tzinfo is None) # Returned date should be in local timezone
expected_dt = datetime_to_native(expected_dt)
self.assertEqual(actual_dt, expected_dt)
check(b' 9-Feb-2007 17:08:08 -0430',
datetime(2007, 2, 9, 17, 8, 8, 0, FixedOffset(-4*60 - 30)))
check(b'12-Feb-2007 17:08:08 +0200',
datetime(2007, 2, 12, 17, 8, 8, 0, FixedOffset(2*60)))
check(b' 9-Dec-2007 17:08:08 +0000',
datetime(2007, 12, 9, 17, 8, 8, 0, FixedOffset(0)))
def test_mixed_types(self):
self.assertEqual(parse_fetch_response([(
b'1 (INTERNALDATE " 9-Feb-2007 17:08:08 +0100" RFC822 {21}',
b'Subject: test\r\n\r\nbody'
), b')']), {
1: {
b'INTERNALDATE': datetime_to_native(datetime(2007, 2, 9, 17, 8, 8, 0, FixedOffset(60))),
b'RFC822': b'Subject: test\r\n\r\nbody',
b'SEQ': 1
}
})
def add_crlf(text):
return CRLF.join(text.splitlines()) + CRLF
system_offset = FixedOffset.for_system()
def datetime_to_native(dt):
return dt.astimezone(system_offset).replace(tzinfo=None)
| maxiimou/imapclient | imapclient/test/test_response_parser.py | Python | bsd-3-clause | 18,629 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Proxy AMI-related calls from cloud controller to objectstore service."""
import base64
import binascii
import os
import shutil
import tarfile
import tempfile
import boto.s3.connection
import eventlet
from lxml import etree
from nova.api.ec2 import ec2utils
import nova.cert.rpcapi
from nova import exception
from nova.image import glance
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
LOG = logging.getLogger(__name__)
s3_opts = [
cfg.StrOpt('image_decryption_dir',
default='/tmp',
help='parent dir for tempdir used for image decryption'),
cfg.StrOpt('s3_host',
default='$my_ip',
help='hostname or ip for openstack to use when accessing '
'the s3 api'),
cfg.IntOpt('s3_port',
default=3333,
help='port used when accessing the s3 api'),
cfg.StrOpt('s3_access_key',
default='notchecked',
help='access key to use for s3 server for images'),
cfg.StrOpt('s3_secret_key',
default='notchecked',
help='secret key to use for s3 server for images'),
cfg.BoolOpt('s3_use_ssl',
default=False,
help='whether to use ssl when talking to s3'),
cfg.BoolOpt('s3_affix_tenant',
default=False,
help='whether to affix the tenant id to the access key '
'when downloading from s3'),
]
CONF = cfg.CONF
CONF.register_opts(s3_opts)
CONF.import_opt('my_ip', 'nova.netconf')
class S3ImageService(object):
"""Wraps an existing image service to support s3 based register."""
def __init__(self, service=None, *args, **kwargs):
self.cert_rpcapi = nova.cert.rpcapi.CertAPI()
self.service = service or glance.get_default_image_service()
self.service.__init__(*args, **kwargs)
def _translate_uuids_to_ids(self, context, images):
return [self._translate_uuid_to_id(context, img) for img in images]
def _translate_uuid_to_id(self, context, image):
image_copy = image.copy()
try:
image_uuid = image_copy['id']
except KeyError:
pass
else:
image_copy['id'] = ec2utils.glance_id_to_id(context, image_uuid)
for prop in ['kernel_id', 'ramdisk_id']:
try:
image_uuid = image_copy['properties'][prop]
except (KeyError, ValueError):
pass
else:
image_id = ec2utils.glance_id_to_id(context, image_uuid)
image_copy['properties'][prop] = image_id
return image_copy
def _translate_id_to_uuid(self, context, image):
image_copy = image.copy()
try:
image_id = image_copy['id']
except KeyError:
pass
else:
image_copy['id'] = ec2utils.id_to_glance_id(context, image_id)
for prop in ['kernel_id', 'ramdisk_id']:
try:
image_id = image_copy['properties'][prop]
except (KeyError, ValueError):
pass
else:
image_uuid = ec2utils.id_to_glance_id(context, image_id)
image_copy['properties'][prop] = image_uuid
return image_copy
def create(self, context, metadata, data=None):
"""Create an image.
metadata['properties'] should contain image_location.
"""
image = self._s3_create(context, metadata)
return image
def delete(self, context, image_id):
image_uuid = ec2utils.id_to_glance_id(context, image_id)
self.service.delete(context, image_uuid)
def update(self, context, image_id, metadata, data=None):
image_uuid = ec2utils.id_to_glance_id(context, image_id)
metadata = self._translate_id_to_uuid(context, metadata)
image = self.service.update(context, image_uuid, metadata, data)
return self._translate_uuid_to_id(context, image)
def detail(self, context, **kwargs):
#NOTE(bcwaldon): sort asc to make sure we assign lower ids
# to older images
kwargs.setdefault('sort_dir', 'asc')
images = self.service.detail(context, **kwargs)
return self._translate_uuids_to_ids(context, images)
def show(self, context, image_id):
image_uuid = ec2utils.id_to_glance_id(context, image_id)
image = self.service.show(context, image_uuid)
return self._translate_uuid_to_id(context, image)
@staticmethod
def _conn(context):
# NOTE(vish): access and secret keys for s3 server are not
# checked in nova-objectstore
access = CONF.s3_access_key
if CONF.s3_affix_tenant:
access = '%s:%s' % (access, context.project_id)
secret = CONF.s3_secret_key
calling = boto.s3.connection.OrdinaryCallingFormat()
return boto.s3.connection.S3Connection(aws_access_key_id=access,
aws_secret_access_key=secret,
is_secure=CONF.s3_use_ssl,
calling_format=calling,
port=CONF.s3_port,
host=CONF.s3_host)
@staticmethod
def _download_file(bucket, filename, local_dir):
key = bucket.get_key(filename)
local_filename = os.path.join(local_dir, os.path.basename(filename))
key.get_contents_to_filename(local_filename)
return local_filename
def _s3_parse_manifest(self, context, metadata, manifest):
manifest = etree.fromstring(manifest)
image_format = 'ami'
image_type = 'machine'
try:
kernel_id = manifest.find('machine_configuration/kernel_id').text
if kernel_id == 'true':
image_format = 'aki'
image_type = 'kernel'
kernel_id = None
except Exception:
kernel_id = None
try:
ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text
if ramdisk_id == 'true':
image_format = 'ari'
image_type = 'ramdisk'
ramdisk_id = None
except Exception:
ramdisk_id = None
try:
arch = manifest.find('machine_configuration/architecture').text
except Exception:
arch = 'x86_64'
# NOTE(yamahata):
        # EC2 ec2-bundle-image --block-device-mapping accepts
        # <virtual name>=<device name> where
        # virtual name = {ami, root, swap, ephemeral<N>}
        #                where N is a non-negative integer
# device name = the device name seen by guest kernel.
# They are converted into
# block_device_mapping/mapping/{virtual, device}
#
# Do NOT confuse this with ec2-register's block device mapping
# argument.
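        # Illustrative (hypothetical) fragment inside <machine_configuration>
        # and the ``mappings`` list the loop below would build from it:
        #   <block_device_mapping>
        #     <mapping><virtual>ami</virtual><device>sda1</device></mapping>
        #     <mapping><virtual>ephemeral0</virtual><device>sdb</device></mapping>
        #   </block_device_mapping>
        # becomes:
        #   [{'virtual': 'ami', 'device': 'sda1'},
        #    {'virtual': 'ephemeral0', 'device': 'sdb'}]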
mappings = []
try:
block_device_mapping = manifest.findall('machine_configuration/'
'block_device_mapping/'
'mapping')
for bdm in block_device_mapping:
mappings.append({'virtual': bdm.find('virtual').text,
'device': bdm.find('device').text})
except Exception:
mappings = []
properties = metadata['properties']
properties['architecture'] = arch
def _translate_dependent_image_id(image_key, image_id):
image_uuid = ec2utils.ec2_id_to_glance_id(context, image_id)
properties[image_key] = image_uuid
if kernel_id:
_translate_dependent_image_id('kernel_id', kernel_id)
if ramdisk_id:
_translate_dependent_image_id('ramdisk_id', ramdisk_id)
if mappings:
properties['mappings'] = mappings
metadata.update({'disk_format': image_format,
'container_format': image_format,
'status': 'queued',
'is_public': False,
'properties': properties})
metadata['properties']['image_state'] = 'pending'
#TODO(bcwaldon): right now, this removes user-defined ids.
# We need to re-enable this.
image_id = metadata.pop('id', None)
image = self.service.create(context, metadata)
# extract the new uuid and generate an int id to present back to user
image_uuid = image['id']
image['id'] = ec2utils.glance_id_to_id(context, image_uuid)
# return image_uuid so the caller can still make use of image_service
return manifest, image, image_uuid
def _s3_create(self, context, metadata):
"""Gets a manifest from s3 and makes an image."""
image_path = tempfile.mkdtemp(dir=CONF.image_decryption_dir)
image_location = metadata['properties']['image_location']
bucket_name = image_location.split('/')[0]
manifest_path = image_location[len(bucket_name) + 1:]
bucket = self._conn(context).get_bucket(bucket_name)
key = bucket.get_key(manifest_path)
manifest = key.get_contents_as_string()
manifest, image, image_uuid = self._s3_parse_manifest(context,
metadata,
manifest)
def delayed_create():
"""This handles the fetching and decrypting of the part files."""
context.update_store()
log_vars = {'image_location': image_location,
'image_path': image_path}
def _update_image_state(context, image_uuid, image_state):
metadata = {'properties': {'image_state': image_state}}
self.service.update(context, image_uuid, metadata,
purge_props=False)
def _update_image_data(context, image_uuid, image_data):
metadata = {}
self.service.update(context, image_uuid, metadata, image_data,
purge_props=False)
_update_image_state(context, image_uuid, 'downloading')
try:
parts = []
elements = manifest.find('image').getiterator('filename')
for fn_element in elements:
part = self._download_file(bucket,
fn_element.text,
image_path)
parts.append(part)
# NOTE(vish): this may be suboptimal, should we use cat?
enc_filename = os.path.join(image_path, 'image.encrypted')
with open(enc_filename, 'w') as combined:
for filename in parts:
with open(filename) as part:
shutil.copyfileobj(part, combined)
except Exception:
LOG.exception(_("Failed to download %(image_location)s "
"to %(image_path)s"), log_vars)
_update_image_state(context, image_uuid, 'failed_download')
return
_update_image_state(context, image_uuid, 'decrypting')
try:
hex_key = manifest.find('image/ec2_encrypted_key').text
encrypted_key = binascii.a2b_hex(hex_key)
hex_iv = manifest.find('image/ec2_encrypted_iv').text
encrypted_iv = binascii.a2b_hex(hex_iv)
dec_filename = os.path.join(image_path, 'image.tar.gz')
self._decrypt_image(context, enc_filename, encrypted_key,
encrypted_iv, dec_filename)
except Exception:
LOG.exception(_("Failed to decrypt %(image_location)s "
"to %(image_path)s"), log_vars)
_update_image_state(context, image_uuid, 'failed_decrypt')
return
_update_image_state(context, image_uuid, 'untarring')
try:
unz_filename = self._untarzip_image(image_path, dec_filename)
except Exception:
LOG.exception(_("Failed to untar %(image_location)s "
"to %(image_path)s"), log_vars)
_update_image_state(context, image_uuid, 'failed_untar')
return
_update_image_state(context, image_uuid, 'uploading')
try:
with open(unz_filename) as image_file:
_update_image_data(context, image_uuid, image_file)
except Exception:
LOG.exception(_("Failed to upload %(image_location)s "
"to %(image_path)s"), log_vars)
_update_image_state(context, image_uuid, 'failed_upload')
return
metadata = {'status': 'active',
'properties': {'image_state': 'available'}}
self.service.update(context, image_uuid, metadata,
purge_props=False)
shutil.rmtree(image_path)
eventlet.spawn_n(delayed_create)
return image
def _decrypt_image(self, context, encrypted_filename, encrypted_key,
encrypted_iv, decrypted_filename):
elevated = context.elevated()
try:
key = self.cert_rpcapi.decrypt_text(elevated,
project_id=context.project_id,
text=base64.b64encode(encrypted_key))
except Exception, exc:
msg = _('Failed to decrypt private key: %s') % exc
raise exception.NovaException(msg)
try:
iv = self.cert_rpcapi.decrypt_text(elevated,
project_id=context.project_id,
text=base64.b64encode(encrypted_iv))
except Exception, exc:
raise exception.NovaException(_('Failed to decrypt initialization '
'vector: %s') % exc)
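        # The call below shells out to openssl; the equivalent command line
        # is, illustratively:
        #   openssl enc -d -aes-128-cbc -in <encrypted file> \
        #       -K <hex key> -iv <hex iv> -out <decrypted file>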
try:
utils.execute('openssl', 'enc',
'-d', '-aes-128-cbc',
'-in', '%s' % (encrypted_filename,),
'-K', '%s' % (key,),
'-iv', '%s' % (iv,),
'-out', '%s' % (decrypted_filename,))
except exception.ProcessExecutionError, exc:
raise exception.NovaException(_('Failed to decrypt image file '
'%(image_file)s: %(err)s') %
{'image_file': encrypted_filename,
'err': exc.stdout})
@staticmethod
def _test_for_malicious_tarball(path, filename):
"""Raises exception if extracting tarball would escape extract path."""
tar_file = tarfile.open(filename, 'r|gz')
for n in tar_file.getnames():
if not os.path.abspath(os.path.join(path, n)).startswith(path):
tar_file.close()
raise exception.NovaException(_('Unsafe filenames in image'))
tar_file.close()
@staticmethod
def _untarzip_image(path, filename):
S3ImageService._test_for_malicious_tarball(path, filename)
tar_file = tarfile.open(filename, 'r|gz')
tar_file.extractall(path)
image_file = tar_file.getnames()[0]
tar_file.close()
return os.path.join(path, image_file)
| maoy/zknova | nova/image/s3.py | Python | apache-2.0 | 16,517 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test gettxoutproof and verifytxoutproof RPCs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MerkleBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = True
# Nodes 0/1 are "wallet" nodes, Nodes 2/3 are used for testing
self.extra_args = [[], [], [], ["-txindex"]]
def setup_network(self):
self.setup_nodes()
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
connect_nodes(self.nodes[0], 3)
self.sync_all()
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(105)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
node0utxos = self.nodes[0].listunspent(1)
tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx1)["hex"])
tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx2)["hex"])
# This will raise an exception because the transaction is not yet in a block
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
self.nodes[0].generate(1)
blockhash = self.nodes[0].getblockhash(chain_height + 1)
self.sync_all()
txlist = []
blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
txlist.append(blocktxn[1])
txlist.append(blocktxn[2])
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1])), [txid1])
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist)
txin_spent = self.nodes[1].listunspent(1).pop()
tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 49.98})
txid3 = self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransaction(tx3)["hex"])
self.nodes[0].generate(1)
self.sync_all()
txid_spent = txin_spent["txid"]
txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2
# We can't find the block from a fully-spent tx
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent])
# We can get the proof if we specify the block
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# We can't get the proof if we specify a non-existent block
assert_raises_rpc_error(-5, "Block not found", self.nodes[2].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
# We can get the proof if the transaction is unspent
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent])
# We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2]))), sorted(txlist))
assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid2, txid1]))), sorted(txlist))
# We can always get a proof if we have a -txindex
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3])
if __name__ == '__main__':
MerkleBlockTest().main()
| trippysalmon/bitcoin | test/functional/merkle_blocks.py | Python | mit | 4,390 |
"""
manage
======
Collection of helpers to manage database migrations, shell access, etc.
:copyright: Copyright (c) 2015 Andrey Martyanov. All rights reserved.
:license: MIT, see LICENSE for more details.
"""
from flask.ext.migrate import MigrateCommand
from flask.ext.script import Manager
from notv import models
from notv.app import create_app
from notv.extensions import db
app = create_app()
manager = Manager(app)
@manager.shell
def _make_context():
return dict(app=app, db=db, models=models)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
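# Typical invocations with the manager configured above (standard
# Flask-Script/Flask-Migrate commands):
#   python manage.py shell        # interactive shell with app, db and models
#   python manage.py db migrate   # autogenerate a new migration
#   python manage.py db upgrade   # apply pending migrations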
| martyanov/notv | manage.py | Python | mit | 622 |
from django.contrib import admin
from models import (
Course,
CourseMedian,
CourseOffering,
DistributiveRequirement,
Instructor,
Student,
Review,
Vote,
)
admin.site.register(Course)
admin.site.register(CourseOffering)
admin.site.register(DistributiveRequirement)
admin.site.register(Instructor)
admin.site.register(CourseMedian)
admin.site.register(Review)
admin.site.register(Vote)
admin.site.register(Student)
| layuplist/layup-list | apps/web/admin.py | Python | gpl-3.0 | 445 |
from chucky_tools.base import ChuckyJoern
from chucky_tools.base import GroupedBatchTool
ARGPARSE_DESCRIPTION = """Condition normalization tool."""
QUERY = "idListToNodes({}).transform{{ it.normalize(['{}' : '$SYM']).toList() }}"
class ChuckyNormalizer(GroupedBatchTool, ChuckyJoern):
def __init__(self):
super(ChuckyNormalizer, self).__init__(ARGPARSE_DESCRIPTION)
def _initializeOptParser(self):
super(ChuckyNormalizer, self)._initializeOptParser()
self.argParser.add_argument(
'-e', '--echo',
action='store_true',
default=True,
help='''echo the input line'''
)
self.argParser.add_argument(
'-c', '--condition',
type=int,
default=0,
help='the column containing the node id of a condition'
)
self.argParser.add_argument(
'-s', '--symbol',
type=int,
default=0,
help='the column containing the symbol name '
)
def streamStart(self):
super(ChuckyNormalizer, self).streamStart()
self._group_by_columns = [self.args.symbol]
def process_group(self, group_key, group_data):
statement_ids = map(lambda x: int(x[self.args.condition]), group_data)
symbol = group_key[0]
query = QUERY.format(statement_ids, symbol)
results = self.run_query(query)
for line, result in zip(group_data, results):
if self.args.echo:
self.write_fields(line + result)
else:
self.write_fields(result)
| a0x77n/chucky-tools | src/chucky_tools/chucky_normalizer.py | Python | gpl-3.0 | 1,611 |
# -*- coding: utf-8 -*-
'''
Folium
-------
Make beautiful, interactive maps with Python and Leaflet.js
'''
from __future__ import print_function
from __future__ import division
import codecs
import json
import functools
from jinja2 import Environment, PackageLoader
from pkg_resources import resource_string, resource_filename
import utilities
from uuid import uuid4
ENV = Environment(loader=PackageLoader('folium', 'templates'))
def initialize_notebook():
"""Initialize the IPython notebook display elements"""
try:
from IPython.core.display import display, HTML
except ImportError:
print("IPython Notebook could not be loaded.")
lib_css = ENV.get_template('ipynb_init_css.html')
lib_js = ENV.get_template('ipynb_init_js.html')
leaflet_dvf = ENV.get_template('leaflet-dvf.markers.min.js')
display(HTML(lib_css.render()))
display(HTML(lib_js.render({'leaflet_dvf': leaflet_dvf.render()})))
def iter_obj(type):
    '''Decorator to keep count of different map object types in self.mark_cnt'''
def decorator(func):
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
self.mark_cnt[type] = self.mark_cnt.get(type, 0) + 1
func_result = func(self, *args, **kwargs)
return func_result
return wrapper
return decorator
class Map(object):
'''Create a Map with Folium'''
def __init__(self, location=None, width=960, height=500,
tiles='OpenStreetMap', API_key=None, max_zoom=18,
zoom_start=10, attr=None):
'''Create a Map with Folium and Leaflet.js
Generate a base map of given width and height with either default
tilesets or a custom tileset URL. The following tilesets are built-in
to Folium. Pass any of the following to the "tiles" keyword:
-"OpenStreetMap"
-"MapQuest Open"
-"MapQuest Open Aerial"
-"Mapbox Bright" (Limited levels of zoom for free tiles)
-"Mapbox Control Room" (Limited levels of zoom for free tiles)
-"Stamen Terrain"
-"Stamen Toner"
-"Cloudmade" (Must pass API key)
-"Mapbox" (Must pass API key)
You can pass a custom tileset to Folium by passing a Leaflet-style
URL to the tiles parameter:
http://{s}.yourtiles.com/{z}/{x}/{y}.png
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Map (Northing, Easting).
width: int, default 960
Width of the map.
height: int, default 500
Height of the map.
tiles: str, default 'OpenStreetMap'
Map tileset to use. Can use defaults or pass a custom URL.
API_key: str, default None
API key for Cloudmade or Mapbox tiles.
max_zoom: int, default 18
Maximum zoom depth for the map.
zoom_start: int, default 10
Initial zoom level for the map.
attr: string, default None
Map tile attribution; only required if passing custom tile URL.
Returns
-------
Folium Map Object
Examples
--------
>>>map = folium.Map(location=[45.523, -122.675], width=750, height=500)
>>>map = folium.Map(location=[45.523, -122.675],
tiles='Mapbox Control Room')
>>>map = folium.Map(location=(45.523, -122.675), max_zoom=20,
tiles='Cloudmade', API_key='YourKey')
>>>map = folium.Map(location=[45.523, -122.675], zoom_start=2,
tiles=('http://{s}.tiles.mapbox.com/v3/'
'mapbox.control-room/{z}/{x}/{y}.png'),
attr='Mapbox attribution')
'''
#Inits
self.map_path = None
self.render_iframe = False
self.map_type = 'base'
self.map_id = '_'.join(['folium', uuid4().hex])
#Mark counter, JSON, Plugins
self.mark_cnt = {}
self.json_data = {}
self.plugins = {}
#Location
if not location:
raise ValueError('You must pass a Lat/Lon location to initialize'
' your map')
self.location = location
#Map Size Parameters
self.width = width
self.height = height
self.map_size = {'width': width, 'height': height}
self._size = ('style="width: {0}px; height: {1}px"'
.format(width, height))
#Templates
self.env = ENV
self.template_vars = {'lat': location[0], 'lon': location[1],
'size': self._size, 'max_zoom': max_zoom,
'zoom_level': zoom_start,
'map_id': self.map_id}
#Tiles
self.tiles = ''.join(tiles.lower().strip().split())
if self.tiles in ('cloudmade', 'mapbox') and not API_key:
raise ValueError('You must pass an API key if using Cloudmade'
' or non-default Mapbox tiles.')
self.default_tiles = ['openstreetmap', 'mapboxcontrolroom',
'mapquestopen', 'mapquestopenaerial',
'mapboxbright', 'mapbox', 'cloudmade',
'stamenterrain', 'stamentoner']
self.tile_types = {}
for tile in self.default_tiles:
self.tile_types[tile] = {'templ':
self.env.get_template(tile + '_tiles.txt'),
'attr':
self.env.get_template(tile + '_att.txt')}
if self.tiles in self.tile_types:
self.template_vars['Tiles'] = (self.tile_types[self.tiles]['templ']
.render(API_key=API_key))
self.template_vars['attr'] = (self.tile_types[self.tiles]['attr']
.render())
else:
self.template_vars['Tiles'] = tiles
if not attr:
raise ValueError('Custom tiles must'
' also be passed an attribution')
self.template_vars['attr'] = unicode(attr, 'utf8')
self.tile_types.update({'Custom': {'template': tiles, 'attr': attr}})
@iter_obj('simple')
def simple_marker(self, location=None, popup='Pop Text', popup_on=True):
'''Create a simple stock Leaflet marker on the map, with optional
popup text or Vincent visualization.
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Marker (Northing, Easting)
popup: string or tuple, default 'Pop Text'
Input text or visualization for object. Can pass either text,
or a tuple of the form (Vincent object, 'vis_path.json')
popup_on: boolean, default True
Pass false for no popup information on the marker
Returns
-------
Marker names and HTML in obj.template_vars
Example
-------
>>>map.simple_marker(location=[45.5, -122.3], popup='Portland, OR')
>>>map.simple_marker(location=[45.5, -122.3], popup=(vis, 'vis.json'))
'''
count = self.mark_cnt['simple']
mark_temp = self.env.get_template('simple_marker.js')
#Get marker and popup
marker = mark_temp.render({'marker': 'marker_' + str(count),
'lat': location[0], 'lon': location[1]})
popup_out = self._popup_render(popup=popup, mk_name='marker_',
count=count,
popup_on=popup_on)
add_mark = 'map.addLayer(marker_{0})'.format(count)
self.template_vars.setdefault('markers', []).append((marker,
popup_out,
add_mark))
@iter_obj('line')
def line(self, locations,
line_color=None, line_opacity=None, line_weight=None):
'''Add a line to the map with optional styles.
Parameters
----------
locations: list of points (latitude, longitude)
Latitude and Longitude of line (Northing, Easting)
line_color: string, default Leaflet's default ('#03f')
line_opacity: float, default Leaflet's default (0.5)
line_weight: float, default Leaflet's default (5)
Note: If the optional styles are omitted, they will not be included
in the HTML output and will obtain the Leaflet defaults listed above.
Example
-------
>>>map.line(locations=[(45.5, -122.3), (42.3, -71.0)])
>>>map.line(locations=[(45.5, -122.3), (42.3, -71.0)],
line_color='red', line_opacity=1.0)
'''
count = self.mark_cnt['line']
line_temp = self.env.get_template('polyline.js')
polyline_opts = {'color': line_color,
'weight': line_weight,
'opacity': line_opacity}
varname = 'line_{}'.format(count)
line_rendered = line_temp.render({'line': varname,
'locations': locations,
'options': polyline_opts})
add_line = 'map.addLayer({});'.format(varname)
self.template_vars.setdefault('lines', []).append((line_rendered,
add_line))
@iter_obj('circle')
def circle_marker(self, location=None, radius=500, popup='Pop Text',
popup_on=True, line_color='black', fill_color='black',
fill_opacity=0.6):
'''Create a simple circle marker on the map, with optional popup text
or Vincent visualization.
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Marker (Northing, Easting)
radius: int, default 500
Circle radius, in pixels
popup: string or tuple, default 'Pop Text'
Input text or visualization for object. Can pass either text,
or a tuple of the form (Vincent object, 'vis_path.json')
popup_on: boolean, default True
Pass false for no popup information on the marker
line_color: string, default black
Line color. Can pass hex value here as well.
fill_color: string, default black
Fill color. Can pass hex value here as well.
fill_opacity: float, default 0.6
Circle fill opacity
Returns
-------
Circle names and HTML in obj.template_vars
Example
-------
>>>map.circle_marker(location=[45.5, -122.3],
radius=1000, popup='Portland, OR')
>>>map.circle_marker(location=[45.5, -122.3],
radius=1000, popup=(bar_chart, 'bar_data.json'))
'''
count = self.mark_cnt['circle']
circle_temp = self.env.get_template('circle_marker.js')
circle = circle_temp.render({'circle': 'circle_' + str(count),
'radius': radius,
'lat': location[0], 'lon': location[1],
'line_color': line_color,
'fill_color': fill_color,
'fill_opacity': fill_opacity})
popup_out = self._popup_render(popup=popup, mk_name='circle_',
count=count,
popup_on=popup_on)
add_mark = 'map.addLayer(circle_{0})'.format(count)
self.template_vars.setdefault('markers', []).append((circle,
popup_out,
add_mark))
@iter_obj('polygon')
def polygon_marker(self, location=None, line_color='black', line_opacity=1,
line_weight=2, fill_color='blue', fill_opacity=1,
num_sides=4, rotation=0, radius=15, popup='Pop Text',
popup_on=True):
'''Custom markers using the Leaflet Data Vis Framework.
Parameters
----------
location: tuple or list, default None
Latitude and Longitude of Marker (Northing, Easting)
line_color: string, default 'black'
Marker line color
line_opacity: float, default 1
Line opacity, scale 0-1
line_weight: int, default 2
Stroke weight in pixels
fill_color: string, default 'blue'
Marker fill color
fill_opacity: float, default 1
Marker fill opacity
num_sides: int, default 4
Number of polygon sides
rotation: int, default 0
Rotation angle in degrees
radius: int, default 15
Marker radius, in pixels
popup: string or tuple, default 'Pop Text'
Input text or visualization for object. Can pass either text,
or a tuple of the form (Vincent object, 'vis_path.json')
popup_on: boolean, default True
Pass false for no popup information on the marker
Returns
-------
Polygon marker names and HTML in obj.template_vars
'''
count = self.mark_cnt['polygon']
poly_temp = self.env.get_template('poly_marker.js')
polygon = poly_temp.render({'marker': 'polygon_' + str(count),
'lat': location[0],
'lon': location[1],
'line_color': line_color,
'line_opacity': line_opacity,
'line_weight': line_weight,
'fill_color': fill_color,
'fill_opacity': fill_opacity,
'num_sides': num_sides,
'rotation': rotation,
'radius': radius})
popup_out = self._popup_render(popup=popup, mk_name='polygon_',
count=count,
popup_on=popup_on)
add_mark = 'map.addLayer(polygon_{0})'.format(count)
self.template_vars.setdefault('markers', []).append((polygon,
popup_out,
add_mark))
#Update JS/CSS and other Plugin files
js_temp = self.env.get_template('dvf_js_ref.txt').render()
self.template_vars.update({'dvf_js': js_temp})
polygon_js = resource_string('folium',
'plugins/leaflet-dvf.markers.min.js')
self.plugins.update({'leaflet-dvf.markers.min.js': polygon_js})
def lat_lng_popover(self):
'''Enable popovers to display Lat and Lon on each click'''
latlng_temp = self.env.get_template('lat_lng_popover.js')
self.template_vars.update({'lat_lng_pop': latlng_temp.render()})
def click_for_marker(self, popup=None):
'''Enable the addition of markers via clicking on the map. The marker
popup defaults to Lat/Lon, but custom text can be passed via the
popup parameter. Double click markers to remove them.
Parameters
----------
popup:
Custom popup text
Example
-------
>>>map.click_for_marker(popup='Your Custom Text')
'''
latlng = '"Latitude: " + lat + "<br>Longitude: " + lng '
click_temp = self.env.get_template('click_for_marker.js')
if popup:
popup_txt = ''.join(['"', popup, '"'])
else:
popup_txt = latlng
click_str = click_temp.render({'popup': popup_txt})
self.template_vars.update({'click_pop': click_str})
def _popup_render(self, popup=None, mk_name=None, count=None,
popup_on=True):
'''Popup renderer: either text or Vincent/Vega.
Parameters
----------
popup: str or Vincent tuple, default None
String for text popup, or tuple of (Vincent object, json_path)
mk_name: str, default None
Type of marker. Simple, Circle, etc.
count: int, default None
Count of marker
popup_on: boolean, default True
If False, no popup will be rendered
'''
if not popup_on:
return 'var no_pop = null;'
else:
if isinstance(popup, str):
popup_temp = self.env.get_template('simple_popup.js')
return popup_temp.render({'pop_name': mk_name + str(count),
'pop_txt': json.dumps(popup)})
elif isinstance(popup, tuple):
#Update template with JS libs
vega_temp = self.env.get_template('vega_ref.txt').render()
jquery_temp = self.env.get_template('jquery_ref.txt').render()
d3_temp = self.env.get_template('d3_ref.txt').render()
vega_parse = self.env.get_template('vega_parse.js').render()
self.template_vars.update({'vega': vega_temp,
'd3': d3_temp,
'jquery': jquery_temp,
'vega_parse': vega_parse})
#Parameters for Vega template
vega = popup[0]
mark = ''.join([mk_name, str(count)])
json_out = popup[1]
div_id = popup[1].split('.')[0]
width = vega.width
height = vega.height
if isinstance(vega.padding, dict):
width += vega.padding['left']+vega.padding['right']
height += vega.padding['top']+vega.padding['bottom']
else:
width += 75
height += 50
max_width = self.map_size['width']
vega_id = '#' + div_id
popup_temp = self.env.get_template('vega_marker.js')
return popup_temp.render({'mark': mark, 'div_id': div_id,
'width': width, 'height': height,
'max_width': max_width,
'json_out': json_out,
'vega_id': vega_id})
@iter_obj('geojson')
def geo_json(self, geo_path=None, geo_str=None, data_out='data.json',
data=None, columns=None, key_on=None, threshold_scale=None,
fill_color='blue', fill_opacity=0.6, line_color='black',
line_weight=1, line_opacity=1, legend_name=None,
topojson=None, reset=False):
'''Apply a GeoJSON overlay to the map.
Plot a GeoJSON overlay on the base map. There is no requirement
to bind data (passing just a GeoJSON plots a single-color overlay),
but there is a data binding option to map your columnar data to
different feature objects with a color scale.
If data is passed as a Pandas dataframe, the "columns" and "key-on"
keywords must be included, the first to indicate which DataFrame
columns to use, the second to indicate the layer in the GeoJSON
on which to key the data. The 'columns' keyword does not need to be
passed for a Pandas series.
Colors are generated from color brewer (http://colorbrewer2.org/)
sequential palettes on a D3 threshold scale. The scale defaults to the
following quantiles: [0, 0.5, 0.75, 0.85, 0.9]. A custom scale can be
passed to `threshold_scale` of length <=6, in order to match the
color brewer range.
TopoJSONs can be passed as "geo_path", but the "topojson" keyword must
also be passed with the reference to the topojson objects to convert.
See the topojson.feature method in the TopoJSON API reference:
https://github.com/mbostock/topojson/wiki/API-Reference
Parameters
----------
geo_path: string, default None
URL or File path to your GeoJSON data
geo_str: string, default None
String of GeoJSON, alternative to geo_path
data_out: string, default 'data.json'
Path to write Pandas DataFrame/Series to JSON if binding data
data: Pandas DataFrame or Series, default None
Data to bind to the GeoJSON.
columns: dict or tuple, default None
If the data is a Pandas DataFrame, the columns of data to be bound.
Must pass column 1 as the key, and column 2 the values.
key_on: string, default None
Variable in the GeoJSON file to bind the data to. Must always
start with 'feature' and be in JavaScript objection notation.
Ex: 'feature.id' or 'feature.properties.statename'.
threshold_scale: list, default None
Data range for D3 threshold scale. Defaults to the following range
of quantiles: [0, 0.5, 0.75, 0.85, 0.9], rounded to the nearest
order-of-magnitude integer. Ex: 270 rounds to 200, 5600 to 6000.
fill_color: string, default 'blue'
Area fill color. Can pass a hex code, color name, or if you are
binding data, one of the following color brewer palettes:
'BuGn', 'BuPu', 'GnBu', 'OrRd', 'PuBu', 'PuBuGn', 'PuRd', 'RdPu',
'YlGn', 'YlGnBu', 'YlOrBr', and 'YlOrRd'.
fill_opacity: float, default 0.6
Area fill opacity, range 0-1.
line_color: string, default 'black'
GeoJSON geopath line color.
line_weight: int, default 1
GeoJSON geopath line weight.
line_opacity: float, default 1
GeoJSON geopath line opacity, range 0-1.
legend_name: string, default None
Title for data legend. If not passed, defaults to columns[1].
topojson: string, default None
If using a TopoJSON, passing "objects.yourfeature" to the topojson
keyword argument will enable conversion to GeoJSON.
reset: boolean, default False
Remove all current geoJSON layers, start with new layer
Output
------
GeoJSON data layer in obj.template_vars
Example
-------
>>>map.geo_json(geo_path='us-states.json', line_color='blue', line_weight=3)
>>>map.geo_json(geo_path='geo.json', data=df, columns=['Data 1', 'Data 2'],
key_on='feature.properties.myvalue', fill_color='PuBu',
threshold_scale=[0, 20, 30, 40, 50, 60])
>>>map.geo_json(geo_path='countries.json', topojson='objects.countries')
'''
if reset:
reset_vars = ['json_paths', 'func_vars', 'color_scales', 'geo_styles',
'gjson_layers', 'map_legends', 'topo_convert']
for var in reset_vars:
self.template_vars.update({var: []})
self.mark_cnt['geojson'] = 1
def json_style(style_cnt, line_color, line_weight, line_opacity,
fill_color, fill_opacity, quant_fill):
'''Generate JSON styling function from template'''
style_temp = self.env.get_template('geojson_style.js')
style = style_temp.render({'style': style_cnt,
'line_color': line_color,
'line_weight': line_weight,
'line_opacity': line_opacity,
'fill_color': fill_color,
'fill_opacity': fill_opacity,
'quantize_fill': quant_fill})
return style
#Set map type to geojson
self.map_type = 'geojson'
#Get JSON map layer template pieces, convert TopoJSON if necessary
# geo_str is really a hack
if geo_path:
geo_path = ".defer(d3.json, '{0}')".format(geo_path)
elif geo_str:
geo_path = (".defer(function(callback)"
"{{callback(null, JSON.parse('{}'))}})").format(geo_str)
if topojson is None:
map_var = '_'.join(['gjson', str(self.mark_cnt['geojson'])])
layer_var = map_var
else:
map_var = '_'.join(['tjson', str(self.mark_cnt['geojson'])])
topo_obj = '.'.join([map_var, topojson])
layer_var = '_'.join(['topo', str(self.mark_cnt['geojson'])])
topo_templ = self.env.get_template('topo_func.js')
topo_func = topo_templ.render({'map_var': layer_var,
't_var': map_var,
't_var_obj': topo_obj})
topo_lib = self.env.get_template('topojson_ref.txt').render()
self.template_vars.update({'topojson': topo_lib})
self.template_vars.setdefault('topo_convert',
[]).append(topo_func)
style_count = '_'.join(['style', str(self.mark_cnt['geojson'])])
#Get Data binding pieces if available
if data is not None:
import pandas as pd
#Create DataFrame with only the relevant columns
if isinstance(data, pd.DataFrame):
data = pd.concat([data[columns[0]], data[columns[1]]], axis=1)
#Save data to JSON
self.json_data[data_out] = utilities.transform_data(data)
#Add data to queue
d_path = ".defer(d3.json, '{0}')".format(data_out)
self.template_vars.setdefault('json_paths', []).append(d_path)
#Add data variable to makeMap function
data_var = '_'.join(['data', str(self.mark_cnt['geojson'])])
self.template_vars.setdefault('func_vars', []).append(data_var)
#D3 Color scale
if isinstance(data, pd.DataFrame):
series = data[columns[1]]
else:
series = data
domain = threshold_scale or utilities.split_six(series=series)
if len(domain) > 253:
raise ValueError('The threshold scale must be of length <= 253')
if not utilities.color_brewer(fill_color):
raise ValueError('Please pass a valid color brewer code to '
                                 'fill_color. See docstring for valid codes.')
palette = utilities.color_brewer(fill_color, len(domain))
d3range = palette[0: len(domain) + 1]
tick_labels = utilities.legend_scaler(domain)
color_temp = self.env.get_template('d3_threshold.js')
d3scale = color_temp.render({'domain': domain,
'range': d3range})
self.template_vars.setdefault('color_scales', []).append(d3scale)
#Create legend
name = legend_name or columns[1]
leg_templ = self.env.get_template('d3_map_legend.js')
lin_min = domain[0] - abs(domain[0]*0.1)
lin_max = domain[-1] + abs(domain[-1]*0.1)
legend = leg_templ.render({'lin_min': lin_min,
'lin_max': lin_max,
'tick_labels': tick_labels,
'caption': name})
self.template_vars.setdefault('map_legends', []).append(legend)
#Style with color brewer colors
matchColor = 'color(matchKey({0}, {1}))'.format(key_on, data_var)
style = json_style(style_count, line_color, line_weight,
line_opacity, None, fill_opacity, matchColor)
else:
style = json_style(style_count, line_color, line_weight,
line_opacity, fill_color, fill_opacity, None)
layer = ('gJson_layer_{0} = L.geoJson({1}, {{style: {2}}}).addTo(map)'
.format(self.mark_cnt['geojson'], layer_var, style_count))
self.template_vars.setdefault('json_paths', []).append(geo_path)
self.template_vars.setdefault('func_vars', []).append(map_var)
self.template_vars.setdefault('geo_styles', []).append(style)
self.template_vars.setdefault('gjson_layers', []).append(layer)
def _build_map(self, html_templ=None, templ_type='string'):
'''Build HTML/JS/CSS from Templates given current map type'''
if html_templ is None:
map_types = {'base': 'fol_template.html',
'geojson': 'geojson_template.html'}
#Check current map type
type_temp = map_types[self.map_type]
html_templ = self.env.get_template(type_temp)
else:
if templ_type == 'string':
html_templ = self.env.from_string(html_templ)
self.HTML = html_templ.render(self.template_vars)
def create_map(self, path='map.html', plugin_data_out=True, template=None):
'''Write Map output to HTML and data output to JSON if available
Parameters:
-----------
path: string, default 'map.html'
Path for HTML output for map
plugin_data_out: boolean, default True
If using plugins such as awesome markers, write all plugin
data such as JS/CSS/images to path
template: string, default None
Custom template to render
'''
self.map_path = path
self._build_map(template)
with codecs.open(path, 'w', 'utf-8') as f:
f.write(self.HTML)
if self.json_data:
for path, data in self.json_data.iteritems():
with open(path, 'w') as g:
json.dump(data, g)
if self.plugins and plugin_data_out:
for name, plugin in self.plugins.iteritems():
with open(name, 'w') as f:
f.write(plugin)
def _repr_html_(self):
"""Build the HTML representation for IPython."""
map_types = {'base': 'ipynb_repr.html',
'geojson': 'ipynb_iframe.html'}
#Check current map type
type_temp = map_types[self.map_type]
if self.render_iframe:
type_temp = 'ipynb_iframe.html'
templ = self.env.get_template(type_temp)
self._build_map(html_templ=templ, templ_type='temp')
if self.map_type == 'geojson' or self.render_iframe:
if not self.map_path:
raise ValueError('Use create_map to set the path!')
return templ.render(path=self.map_path, width=self.width,
height=self.height)
return self.HTML
def display(self):
"""Display the visualization inline in the IPython notebook.
This is deprecated, use the following instead::
from IPython.display import display
display(viz)
"""
from IPython.core.display import display, HTML
display(HTML(self._repr_html_()))
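# Minimal end-to-end sketch of the API documented above (coordinates and the
# output path are illustrative, not part of this module):
#   m = Map(location=[45.52, -122.68], zoom_start=6)
#   m.simple_marker(location=[45.52, -122.68], popup='Portland, OR')
#   m.line(locations=[(45.5, -122.3), (42.3, -71.0)], line_color='red')
#   m.create_map(path='portland.html')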
| UDST/folium | folium/folium.py | Python | mit | 32,390 |
import numpy as np
import paddle.fluid as fluid
# reproducible
np.random.seed(1)
class PolicyGradient:
def __init__(
self,
n_actions,
n_features,
learning_rate=0.01,
reward_decay=0.95,
output_graph=False, ):
self.n_actions = n_actions
self.n_features = n_features
self.lr = learning_rate
self.gamma = reward_decay
self.ep_obs, self.ep_as, self.ep_rs = [], [], []
self.place = fluid.CPUPlace()
self.exe = fluid.Executor(self.place)
def build_net(self):
obs = fluid.layers.data(
name='obs', shape=[self.n_features], dtype='float32')
acts = fluid.layers.data(name='acts', shape=[1], dtype='int64')
vt = fluid.layers.data(name='vt', shape=[1], dtype='float32')
# fc1
fc1 = fluid.layers.fc(input=obs, size=10, act="tanh") # tanh activation
# fc2
        self.all_act_prob = fluid.layers.fc(input=fc1,
                                            size=self.n_actions,
                                            act="softmax")
        self.inference_program = fluid.default_main_program().clone()
# to maximize total reward (log_p * R) is to minimize -(log_p * R)
neg_log_prob = fluid.layers.cross_entropy(
input=self.all_act_prob,
label=acts) # this is negative log of chosen action
neg_log_prob_weight = fluid.layers.elementwise_mul(x=neg_log_prob, y=vt)
loss = fluid.layers.reduce_mean(
neg_log_prob_weight) # reward guided loss
sgd_optimizer = fluid.optimizer.SGD(self.lr)
sgd_optimizer.minimize(loss)
self.exe.run(fluid.default_startup_program())
def choose_action(self, observation):
        prob_weights = self.exe.run(self.inference_program,
feed={"obs": observation[np.newaxis, :]},
fetch_list=[self.all_act_prob])
prob_weights = np.array(prob_weights[0])
# select action w.r.t the actions prob
action = np.random.choice(
range(prob_weights.shape[1]), p=prob_weights.ravel())
return action
def store_transition(self, s, a, r):
self.ep_obs.append(s)
self.ep_as.append(a)
self.ep_rs.append(r)
def learn(self):
# discount and normalize episode reward
discounted_ep_rs_norm = self._discount_and_norm_rewards()
tensor_obs = np.vstack(self.ep_obs).astype("float32")
tensor_as = np.array(self.ep_as).astype("int64")
tensor_as = tensor_as.reshape([tensor_as.shape[0], 1])
tensor_vt = discounted_ep_rs_norm.astype("float32")[:, np.newaxis]
# train on episode
self.exe.run(
fluid.default_main_program(),
feed={
"obs": tensor_obs, # shape=[None, n_obs]
"acts": tensor_as, # shape=[None, ]
"vt": tensor_vt # shape=[None, ]
})
self.ep_obs, self.ep_as, self.ep_rs = [], [], [] # empty episode data
return discounted_ep_rs_norm
def _discount_and_norm_rewards(self):
# discount episode rewards
discounted_ep_rs = np.zeros_like(self.ep_rs)
running_add = 0
for t in reversed(range(0, len(self.ep_rs))):
running_add = running_add * self.gamma + self.ep_rs[t]
discounted_ep_rs[t] = running_add
# normalize episode rewards
discounted_ep_rs -= np.mean(discounted_ep_rs)
discounted_ep_rs /= np.std(discounted_ep_rs)
return discounted_ep_rs
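# Minimal training-loop sketch (``env`` is an assumed gym-style environment
# and is not part of this module):
#   agent = PolicyGradient(n_actions=2, n_features=4)
#   agent.build_net()
#   for episode in range(200):
#       obs = env.reset()
#       while True:
#           action = agent.choose_action(obs.astype("float32"))
#           obs_, reward, done, _ = env.step(action)
#           agent.store_transition(obs, action, reward)
#           obs = obs_
#           if done:
#               agent.learn()  # train on the completed episode
#               break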
| kuke/models | fluid/policy_gradient/brain.py | Python | apache-2.0 | 3,616 |
"""AppAssure 5 REST API"""
from appassure.api import AppAssureAPI
class ILicenseManagement(AppAssureAPI):
"""Full documentation online at
http://docs.appassure.com/display/AA50D/ILicenseManagement
"""
def getAgentLicenseInfo(self, agentId):
"""Gets licensing information for the given agent."""
return self.session.request('license/agent/%s'
% (agentId))
def changeGroupKey(self, groupKey):
"""Gets new group key from the UI, validates it, and then
returns the validation results.
"""
return self.session.request('license/changeGroupKey/%s'
% (groupKey), 'POST')
def getCoreLicenseInfo(self):
"""Gets core licensing information."""
return self.session.request('license/core')
def isKeySpecifiedAndValid(self):
"""Gets state of the key."""
return self.session.request('license/key')
def getLicenseInfo(self):
"""Gets licensing information for Core and all the agents."""
return self.session.request('license/licenseInfo')
def getLicenseStatesNotifications(self):
"""Gets license states notifications for Core and all the
agents.
"""
return self.session.request('license/licenseStatesNotifications')
def forcePhoneHome(self):
"""Forces connection with License Portal immediately."""
return self.session.request('license/phoneHome/force', 'POST')
def isPhoneHomeEnable(self):
"""Determines if the phone home operation is enabled."""
return self.session.request('license/phoneHome/isEnable')
def isPhoneHomeInProgress(self):
"""Determines if the phone home operation is in progress."""
return self.session.request('license/phoneHome/isInProgress')
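# Minimal usage sketch. The ``session`` argument is an assumption: any
# already-authenticated AppAssure session object exposing ``request()``
# (constructed elsewhere in this package); it is not defined in this module.
#   licensing = ILicenseManagement(session)
#   core_info = licensing.getCoreLicenseInfo()
#   agent_info = licensing.getAgentLicenseInfo('some-agent-id')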
| rshipp/python-appassure | appassure/core/ILicenseManagement.py | Python | bsd-3-clause | 1,811 |
"""
Copyright (c) 2014, Austin R. Benson, David F. Gleich,
Purdue University, and Stanford University.
All rights reserved.
This file is part of MRNMF and is under the BSD 2-Clause License,
which can be found in the LICENSE file in the root directory, or at
http://opensource.org/licenses/BSD-2-Clause
"""
from NMF_algs import *
import matplotlib
import matplotlib.pyplot as plt
if 1:
path1 = 'data/synthetic_200M_200_20/NMF_200M_200_20-qrr.txt'
cols_path = 'data/synthetic_200M_200_20/NMF_200M_200_20-colnorms.txt'
data1 = parse_normalized(path1, cols_path)
data2 = parse(path1)
path3 = 'data/synthetic_200M_200_20//NMF_200M_200_20-proj.txt'
data3 = parse_normalized(path3, cols_path)
orig_cols = [0, 1] + range(20, 200, 10)
H_orig = []
with open ('data/synthetic_200M_200_20/Hprime_20_200.txt') as f:
for line in f:
H_orig.append([float(v) for v in line.split()])
H_orig = np.array(H_orig)
H_orig_all = np.zeros((200, 200))
for col in orig_cols:
H_orig_all[col, col] = 1.0
print H_orig.shape
rs = []
numcols = range(2, 22, 1)
for cols in numcols:
cols1, H1, resid1 = compute_extreme_pts(data1, cols, 'SPA', cols_path)
cols2, H2, resid2 = compute_extreme_pts(data2, cols, 'xray')
cols3, H3, resid3 = compute_extreme_pts(data3, cols, 'GP', cols_path)
rs.append((resid1, resid2, resid3))
visualize_resids(numcols, rs, 'synth_exact_residuals')
r = 20
# This is one of the biggest hacks I have ever done. For some reason,
# pyplot screws up the first plot. Thus, we plot a dummy plot.
cols0, H0, resid0 = compute_extreme_pts(data1, r, 'SPA', cols_path)
visualize(H0, cols0, 'dummy', 'synth_noisy_coeffs_dummy')
cols0.sort()
print cols0
print H_orig
not_orig_cols = [x for x in range(200) if x not in orig_cols]
for ind1, i in enumerate(orig_cols):
for ind2, j in enumerate(not_orig_cols):
H_orig_all[i, j] = H_orig[ind1, ind2]
imshow_wrapper(H_orig_all, title='Generation', fname='synth_exact_coeffs_gen')
cols1, H1, resid1 = compute_extreme_pts(data1, r, 'SPA', cols_path)
visualize(H1, cols1, 'SPA', 'synth_exact_coeffs_SPA')
cols1.sort()
print cols1
cols2, H2, resid2 = compute_extreme_pts(data2, r, 'xray')
visualize(H2, cols2, 'XRAY', 'synth_exact_coeffs_XRAY')
cols2.sort()
print cols2
cols3, H3, resid3 = compute_extreme_pts(data3, r, 'GP', cols_path)
visualize(H3, cols3, 'GP', 'synth_exact_coeffs_GP')
cols3.sort()
print cols3
visualize_cols([cols1, cols2, cols3, orig_cols], H3.shape[1],
['SPA', 'XRAY', 'GP', 'Generation'],
'synth_exact_cols')
| arbenson/mrnmf | plotting/synthetic_exact_plots.py | Python | bsd-2-clause | 2,778 |
"""
ve_phantom.py - setup sge scripts to launch sims on the cluster
Guassian excitation sims for UWM for soft, VE phantoms and processing phase
velocity information.
"""
__author__ = 'Mark Palmeri'
__date__ = '2014-10-16'
import os
# define some stuff
G0 = [10.0] # kPa
GI = [1.0] # kPa
ETA = [0.01, 1.0, 3.0, 6.0, 9.0]
GAUSS = [0.1] # sigma [cm^-1]
EXC_DUR = range(100, 300, 400) # us
root = '/pisgah/mlp6/scratch/ve_phantom'
femgit = '/home/mlp6/git/fem'
swdyn = 've_phantom.dyn'
SGE_FILENAME = 've_phantom.sge'
for i in range(len(G0)):
for ii in range(len(GI)):
for iii in range(len(ETA)):
for j in range(len(GAUSS)):
for k in range(len(EXC_DUR)):
# compute BETA from the user-defined variables
BETA = (G0[i] * 1e4 - GI[ii] * 1e4) / ETA[iii]
# negative BETA is not physically realistic
if BETA < 0:
break
sim_path = '%s/G0%.1fkPa/GI%.1fkPa/BETA%.1f/GAUSS_%.2f_%.2f/EXCDUR_%ius/' % (root, G0[i], GI[ii], BETA, GAUSS[j], GAUSS[j], EXC_DUR[k])
if not os.path.exists(sim_path):
os.makedirs(sim_path)
os.chdir(sim_path)
print(os.getcwd())
if not os.path.exists('res_sim.mat'):
os.system('cp %s/%s .' % (root, swdyn))
os.system("sed -i -e 's/G0/%.1f/' %s" %
(G0[i] * 10000.0, swdyn))
os.system("sed -i -e 's/GI/%.1f/' %s" %
(GI[ii] * 10000.0, swdyn))
os.system("sed -i -e 's/BETA/%.1f/' %s" %
(BETA, swdyn))
os.system("sed -i -e 's/TOFF1/%.1f/' %s" %
(EXC_DUR[k], swdyn))
os.system("sed -i -e 's/TOFF2/%.1f/' %s" %
(EXC_DUR[k] + 1, swdyn))
# create link to loads.dyn based on Guassian width
os.system("ln -fs %s/gauss/gauss_exc_sigma_%.3f_%.3f_"
"1.000_center_0.000_0.000_-3.000_amp_1.000_"
"amp_cut_0.050_qsym.dyn loads.dyn" %
(root, GAUSS[j], GAUSS[j]))
os.system("ln -fs %s/mesh/nodes.dyn" % root)
os.system("ln -fs %s/mesh/elems.dyn" % root)
os.system("ln -fs %s/mesh/bc.dyn" % root)
#os.system("cp %s/amanda_exclude ./.exclude" % root)
# create sge output file
SGE = open('%s' % SGE_FILENAME, 'w')
SGE.write('#!/bin/bash\n')
SGE.write('#$ -q high.q\n')
#SGE.write('#$ -l num_proc=24\n')
SGE.write('#$ -l mem_free=1G\n')
SGE.write('#$ -pe smp 12\n')
SGE.write('date\n')
SGE.write('hostname\n')
SGE.write('export DISPLAY=\n')
SGE.write('ls-dyna-d ncpu=$NSLOTS i=%s\n' % (swdyn))
SGE.write('rm d3*\n')
SGE.write('python %s/post/create_disp_dat.py '
'--nodout nodout\n' % (femgit))
SGE.write('python %s/post/create_res_sim_mat.py '
'--dynadeck %s \n' % (femgit, swdyn))
SGE.write('if [ -e disp.dat ]; '
'then rm nodout; fi\n')
SGE.write('gzip -v disp.dat\n')
SGE.close()
os.system('qsub --bash %s' % (SGE_FILENAME))
else:
print('res_sim.mat already exists')
| Guokr1991/cervix | ve_phantom/ve_phantom.py | Python | mit | 3,973 |
#!/usr/bin/env python
"""
Created on Tue Oct 18 09:11:38 2016
@author: Admin
"""
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
from numpy import linalg as LA
from mpl_toolkits.mplot3d import Axes3D
def Verp(ovel, opos, dt, a):
"Position:"
pos = opos + ovel*dt + .5*a*dt**2
return pos
def Verv(pos, mass, ovel, dt, a, e):
"Velocities:"
an, pe, ke = acc(pos, mass, ovel, e)
vel = ovel + .5*(a + an)*dt
return vel
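# The two helpers above implement the standard velocity Verlet scheme:
#   x_{n+1} = x_n + v_n*dt + 0.5*a_n*dt**2
#   v_{n+1} = v_n + 0.5*(a_n + a_{n+1})*dt
# where a_{n+1} is recomputed from the updated positions (Verv calls acc()
# internally to obtain it).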
def acc(pos, mass, vel, e):
a = np.zeros((N,3))
pe = np.zeros((N,1))
ke = np.zeros((N,1))
G = 6.67408*10**-11
for i in range(0,N-1):
for j in range(i+1,N):
vi = LA.norm(vel[i])
vj = LA.norm(vel[j])
r = pos[i]-pos[j]
m = LA.norm(r)
F = -(G/(m+e)**3)*r
a[i] += F*mass[j]
a[j] += -F*mass[i]
pe[i] += (G*mass[i]*mass[j])/(m+e)
pe[j] += (G*mass[j]*mass[i])/(m+e)
ke[i] += .5*mass[i]*vi**2
ke[j] += .5*mass[j]*vj**2
return a, pe, ke
AU = 149597871000
Ms = 1.989*10**30
Me = 5.972*10**24
Ma = 6.39*10**23
AUa = 1.524*AU
"Defining Variables"
N = 3
t_max = 3.1556e7; t = 0
dt_max = 100
v = (2*np.pi*AU)/t_max
mass = np.array([Ms,Me,Ma])
pos = np.zeros((N,3))
vel = np.zeros((N,3))
pos[1] = np.array([0,0.31*AU,0.05*AU])
pos[2] = np.array([0,0.3*AUa,0])
vel[1] = np.array([v,0,0])
vel[2] = np.array([v,0,0])
e = 0.01*AU; n = 100/e
a0 = []; Ta = []
b0 = []; Tb = []
c0 = []; Tc = []
while t < t_max:
ac, pe, ke = acc(pos, mass, vel, e)
dt_grav = np.min([dt_max,((2*n*e)/(LA.norm(ac)))**.5])
"Verlet Method"
opos = pos
ovel = vel
pos = Verp(ovel, opos, dt_grav, ac)
vel = Verv(pos, mass, ovel, dt_grav, ac, e)
t += dt_grav
"""dump pos into file"""
a0.append(pos[0])
b0.append(pos[1])
c0.append(pos[2])
"""dump energies into file"""
Ta.append(pe[0] - ke[0])
Tb.append(pe[1] - ke[1])
Tc.append(pe[2] - ke[2])
a = np.zeros((len(a0),3))
b = np.zeros((len(b0),3))
c = np.zeros((len(c0),3))
for i in range (0,len(a0)):
a[i] = a0[i]
b[i] = b0[i]
c[i] = c0[i]
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
plt.plot(a[:,0],a[:,1],a[:,2]); plt.plot(b[:,0],b[:,1],b[:,2]); plt.plot(c[:,0],c[:,1],c[:,2])
plt.figure()
plt.plot(Ta); plt.plot(Tb); plt.plot(Tc)
| Gezerj/Python | Verlet-N-body.py | Python | mit | 2,472 |
'''
New Integration Test for hybrid.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
test_obj_dict = test_state.TestStateDict()
test_stub = test_lib.lib_get_test_stub()
hybrid = test_stub.HybridObject()
def test():
hybrid.add_datacenter_iz()
hybrid.del_iz()
test_util.test_pass('Add Delete Identity Zone Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
| zstackorg/zstack-woodpecker | integrationtest/vm/hybrid/test_add_iz.py | Python | apache-2.0 | 623 |
import unittest
from unittest import TestCase
from operator import le, gt
import pandas as pd
import numpy as np
from bartpy.data import Data, format_covariate_matrix, make_bartpy_data
from bartpy.initializers.sklearntreeinitializer import map_sklearn_tree_into_bartpy
from bartpy.mutation import TreeMutation, PruneMutation
from bartpy.node import split_node, LeafNode, DecisionNode
from bartpy.tree import mutate, Tree
from bartpy.split import Split, SplitCondition
class TestTreeStructureNodeRetrieval(TestCase):
def setUp(self):
X = format_covariate_matrix(pd.DataFrame({"a": [1, 2, 3], "b": [1, 2, 3]}))
data = Data(X, np.array([1, 2, 3]).astype(float))
split = Split(data)
node = LeafNode(split)
self.a = split_node(node, (SplitCondition(0, 1, le), SplitCondition(0, 1, gt)))
self.tree = Tree([self.a, self.a.left_child, self.a.right_child])
self.c = split_node(self.a._right_child, (SplitCondition(1, 2, le), SplitCondition(1, 2, gt)))
mutate(self.tree, TreeMutation("grow", self.a.right_child, self.c))
self.b = self.a.left_child
self.d = self.c.left_child
self.e = self.c.right_child
def test_retrieve_all_nodes(self):
all_nodes = self.tree.nodes
for node in [self.a, self.b, self.c, self.d, self.e]:
self.assertIn(node, all_nodes)
for node in all_nodes:
self.assertIn(node, [self.a, self.b, self.c, self.d, self.e])
def test_retrieve_all_leaf_nodes(self):
all_nodes = self.tree.leaf_nodes
true_all_nodes = [self.d, self.e, self.b]
for node in true_all_nodes:
self.assertIn(node, all_nodes)
for node in all_nodes:
self.assertIn(node, true_all_nodes)
def test_retrieve_all_leaf_parents(self):
all_nodes = self.tree.prunable_decision_nodes
true_all_nodes = [self.c]
for node in true_all_nodes:
self.assertIn(node, all_nodes)
for node in all_nodes:
self.assertIn(node, true_all_nodes)
def test_retrieve_all_split_nodes(self):
all_nodes = self.tree.decision_nodes
true_all_nodes = [self.c, self.a]
for node in true_all_nodes:
self.assertIn(node, all_nodes)
for node in all_nodes:
self.assertIn(node, true_all_nodes)
class TestTreeStructureDataUpdate(TestCase):
def setUp(self):
X = format_covariate_matrix(pd.DataFrame({"a": [1, 2, 3], "b": [1, 2, 3]}))
self.data = Data(X, np.array([1, 2, 3]).astype(float))
self.a = split_node(LeafNode(Split(self.data)), (SplitCondition(0, 1, le), SplitCondition(0, 1, gt)))
self.b = self.a.left_child
self.x = self.a.right_child
self.tree = Tree([self.a, self.b, self.x])
self.c = split_node(self.a._right_child, (SplitCondition(1, 2, le), SplitCondition(1, 2, gt)))
mutate(self.tree, TreeMutation("grow", self.x, self.c))
self.d = self.c.left_child
self.e = self.c.right_child
def test_update_pushed_through_split(self):
updated_y = np.array([5, 6, 7])
self.tree.update_y(updated_y)
# Left child keeps LTE condition
self.assertListEqual([5, 6, 7], list(self.a.data.y.values))
self.assertListEqual([5], list(self.b.data.y.values[~self.b.data.y._mask]))
self.assertListEqual([6, 7], list(self.c.data.y.values[~self.c.data.y._mask]))
self.assertListEqual([6], list(self.d.data.y.values[~self.d.data.y._mask]))
self.assertListEqual([7], list(self.e.data.y.values[~self.e.data.y._mask]))
class TestTreeStructureMutation(TestCase):
def setUp(self):
self.data = Data(format_covariate_matrix(pd.DataFrame({"a": [1]})), np.array([1]).astype(float))
self.d = LeafNode(Split(self.data), None)
self.e = LeafNode(Split(self.data), None)
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_starts_right(self):
self.assertListEqual([self.c], self.tree.prunable_decision_nodes)
for leaf in [self.b, self.d, self.e]:
self.assertIn(leaf, self.tree.leaf_nodes)
def test_invalid_prune(self):
with self.assertRaises(TypeError):
updated_a = LeafNode(Split(self.data))
PruneMutation(self.a, updated_a)
def test_grow(self):
f, g = LeafNode(Split(self.data)), LeafNode(Split(self.data))
updated_d = DecisionNode(Split(self.data), f, g)
grow_mutation = TreeMutation("grow", self.d, updated_d)
mutate(self.tree, grow_mutation)
self.assertIn(updated_d, self.tree.decision_nodes)
self.assertIn(updated_d, self.tree.prunable_decision_nodes)
self.assertIn(f, self.tree.leaf_nodes)
self.assertNotIn(self.d, self.tree.nodes)
def test_head_prune(self):
b, c = LeafNode(Split(self.data)), LeafNode(Split(self.data))
a = DecisionNode(Split(self.data), b, c)
tree = Tree([a, b, c])
updated_a = LeafNode(Split(self.data))
prune_mutation = PruneMutation(a, updated_a)
mutate(tree, prune_mutation)
self.assertIn(updated_a, tree.leaf_nodes)
self.assertNotIn(self.a, tree.nodes)
def test_internal_prune(self):
updated_c = LeafNode(Split(self.data))
prune_mutation = TreeMutation("prune", self.c, updated_c)
mutate(self.tree, prune_mutation)
self.assertIn(updated_c, self.tree.leaf_nodes)
self.assertNotIn(self.c, self.tree.nodes)
self.assertNotIn(self.d, self.tree.nodes)
self.assertNotIn(self.e, self.tree.nodes)
class TestSklearnToBartPyTreeMapping(unittest.TestCase):
def setUp(self):
self.X = np.random.normal(size=20)
self.y = self.X + np.random.normal(scale=0.1, size=20)
self.data = make_bartpy_data(pd.DataFrame({"a": self.X}), self.y, normalize=False)
def test_same_prediction(self):
from sklearn.ensemble import GradientBoostingRegressor
params = {'n_estimators': 1, 'max_depth': 2, 'min_samples_split': 2,
'learning_rate': 0.8, 'loss': 'ls'}
sklearn_model = GradientBoostingRegressor(**params)
sklearn_model.fit(self.data.X.values, self.data.y.values)
sklearn_tree = sklearn_model.estimators_[0][0].tree_
bartpy_tree = Tree([LeafNode(Split(self.data))])
map_sklearn_tree_into_bartpy(bartpy_tree, sklearn_tree)
sklearn_predictions = sklearn_tree.predict(self.data.X.values.astype(np.float32))
sklearn_predictions = [round(x, 2) for x in sklearn_predictions.reshape(-1)]
bartpy_tree.cache_up_to_date = False
bartpy_tree_predictions = bartpy_tree.predict(self.data.X.values)
bartpy_tree_predictions = [round(x, 2) for x in bartpy_tree_predictions]
self.assertListEqual(sklearn_predictions, bartpy_tree_predictions)
if __name__ == '__main__':
unittest.main()
| JakeColtman/bartpy | tests/test_tree.py | Python | mit | 7,128 |
from south.db import db
from django.db import models
from cms.plugins.video.models import *
class Migration:
no_dry_run = True
def forwards(self, orm):
for video in orm.Video.objects.all():
try:
video._i_height = int(video.height)
except ValueError:
video._i_height = 0
try:
video._i_width = int(video.width)
except ValueError:
video._i_width = 0
video.save()
def backwards(self, orm):
for video in orm.Video.objects.all():
video.height = str(video._i_height)
video.width = str(video._i_width)
video.save()
models = {
'cms.cmsplugin': {
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.CMSPlugin']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.page': {
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'menu_login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '1', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'sites.site': {
'Meta': {'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'video.video': {
'Meta': {'db_table': "'cmsplugin_video'"},
'_i_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'_i_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'auto_hide': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'auto_play': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'bgcolor': ('django.db.models.fields.CharField', [], {'default': "'000000'", 'max_length': '6'}),
'buttonhighlightcolor': ('django.db.models.fields.CharField', [], {'default': "'FFFFFF'", 'max_length': '6'}),
'buttonoutcolor': ('django.db.models.fields.CharField', [], {'default': "'333333'", 'max_length': '6'}),
'buttonovercolor': ('django.db.models.fields.CharField', [], {'default': "'000000'", 'max_length': '6'}),
'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'fullscreen': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.CharField', [], {'max_length': '6'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'loadingbarcolor': ('django.db.models.fields.CharField', [], {'default': "'828282'", 'max_length': '6'}),
'loop': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'movie': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'movie_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'seekbarbgcolor': ('django.db.models.fields.CharField', [], {'default': "'333333'", 'max_length': '6'}),
'seekbarcolor': ('django.db.models.fields.CharField', [], {'default': "'13ABEC'", 'max_length': '6'}),
'textcolor': ('django.db.models.fields.CharField', [], {'default': "'FFFFFF'", 'max_length': '6'}),
'width': ('django.db.models.fields.CharField', [], {'max_length': '6'})
}
}
complete_apps = ['video']
| team-xue/xue | xue/cms/plugins/video/migrations/0004_newplayer_step2.py | Python | bsd-3-clause | 8,111 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2020-12-03 21:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('baza', '0006_auto_20201203_2236'),
]
operations = [
migrations.RemoveField(
model_name='dopuszczenialegalizacje',
name='termin_wykonania_zalecen',
),
migrations.AddField(
model_name='dopuszczenialegalizacje',
name='termin_wykonania_polecenia',
field=models.DateField(blank=True, null=True, verbose_name='termin wykonania zalecenia'),
),
]
| szymanskirafal/ab | baza/migrations/0007_auto_20201203_2257.py | Python | mit | 669 |
from _pydev_imps._pydev_saved_modules import threading
def wrapper(fun):
def pydev_after_run_call():
pass
def inner(*args, **kwargs):
fun(*args, **kwargs)
pydev_after_run_call()
return inner
def wrap_attr(obj, attr):
t_save_start = getattr(obj, attr)
setattr(obj, attr, wrapper(t_save_start))
setattr(obj, "_pydev_run_patched", True)
class ObjectWrapper(object):
def __init__(self, obj):
self.wrapped_object = obj
try:
import functools
functools.update_wrapper(self, obj)
except:
pass
def __getattr__(self, attr):
orig_attr = getattr(self.wrapped_object, attr) #.__getattribute__(attr)
if callable(orig_attr):
def patched_attr(*args, **kwargs):
self.call_begin(attr)
result = orig_attr(*args, **kwargs)
self.call_end(attr)
if result == self.wrapped_object:
return self
return result
return patched_attr
else:
return orig_attr
def call_begin(self, attr):
pass
def call_end(self, attr):
pass
def __enter__(self):
self.call_begin("__enter__")
self.wrapped_object.__enter__()
self.call_end("__enter__")
def __exit__(self, exc_type, exc_val, exc_tb):
self.call_begin("__exit__")
self.wrapped_object.__exit__(exc_type, exc_val, exc_tb)
def factory_wrapper(fun):
def inner(*args, **kwargs):
obj = fun(*args, **kwargs)
return ObjectWrapper(obj)
return inner
def wrap_threads():
# TODO: add wrappers for thread and _thread
# import _thread as mod
# print("Thread imported")
# mod.start_new_thread = wrapper(mod.start_new_thread)
import threading
threading.Lock = factory_wrapper(threading.Lock)
threading.RLock = factory_wrapper(threading.RLock)
# queue patching
try:
import queue # @UnresolvedImport
queue.Queue = factory_wrapper(queue.Queue)
except:
import Queue
Queue.Queue = factory_wrapper(Queue.Queue)
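# Illustrative sketch of the effect (assuming wrap_threads() has been called):
#
#     lock = threading.Lock()      # actually an ObjectWrapper around the real lock
#     lock.acquire()
#     lock.release()
#
# Each attribute access goes through ObjectWrapper.__getattr__, so the acquire/release
# calls are surrounded by the call_begin()/call_end() hooks, which analysers override
# (or monkey-patch) to record concurrency events.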
| asedunov/intellij-community | python/helpers/pydev/pydevd_concurrency_analyser/pydevd_thread_wrappers.py | Python | apache-2.0 | 2,162 |
####### LICENSE #######
# This code is part of the Recombineering module, written by Gregory
# Moyerbrailean at Michigan State University, Department of Microbiology
# and Molecular Genetics.
# Copyright (C) 2010 Gregory Moyerbrailean
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''Handles the local BLAST and the parsing of the results.
The BLAST uses the NCBI blast+ command-line tools to run a local BLAST against
the organism's genome. In the event that a closed genome is not available for
a species, the genome of a closely related strain can be used in its place.
When a hit has been found, the parser function will extract and return relevant
information regarding the corresponding gene.
Alternatively, the user may specify to disable the BLAST function. In this case,
the module will use the scaffold files to extract the necessary information.
The user therefore does not have to have the blast+ command-line tools.
However, the user will also not be able to run organisms such as L. reuteri
against a similar genome, as this method requires exact gene matches.'''
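# Illustrative usage sketch (the query path and organism names below are examples only,
# chosen to follow the 'Files/...' layout assumed elsewhere in this module):
#
#   start, direction, pos, hit, evalue, warn = BlastGenome(
#       'Files/extras/query_gene.fasta', 'Lactobacillus reuteri', debug=False)
#
# With BLAST disabled, SearchGenome('Files/extras/query_gene.fasta',
#                                   'Lactobacillusreuteri', debug=False)
# scans the genome FASTA directly and returns the same six-value tuple.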
import subprocess
from Bio.Blast.Applications import NcbiblastnCommandline as ncl
from Bio.Blast import NCBIXML as nxml
def BlastGenome(queryFile,genome,debug,outputFile='Files/extras/temp_blast.xml'):
if debug:
print "In BLASTing.BlastGenome"
# Modify the genome filename to reflect the path to the genome
genome = genome.replace(' ','')
genomePath = 'Files/genome/' + genome + '/' + genome
## Call blast+ from python
cline = ncl(query=queryFile,db=genomePath,out=outputFile,outfmt=5)
ret_code = subprocess.call(str(cline),shell=True)
if ret_code:
print 'BLASTing file "%s" returned error code %s' % (queryFile,ret_code)
temp = open(queryFile).read()
geneID = temp.split()[0]
geneID = geneID.lstrip('>')
result = nxml.read(open(outputFile))
# If the blast returns no results, it will be treated as a gene
# in the ambiguous region and oligos will be made from both strands
if result.alignments:
return parseRecord(result,genomePath,debug)
else:
        return 0,0,'Ambiguous','No Match','N/A',''  # match the six-value tuple returned elsewhere
def parseRecord(xmlfile,genomePath,debug):
if debug:
print "In BLASTing.parseRecord"
    # Use the record already parsed by BlastGenome instead of re-reading the temp file
    result = xmlfile
hit = result.alignments[0].hit_def
e = result.descriptions[0].e
if debug:
print "Blast match: ",hit
print "E-value: ",e
hitL = hit.split()
hitID = hitL[0]
t = [n for n in hitL if '..' in n]
hitInfo = t[0]
num1,num2 = hitInfo.split('..')
num2 = num2[:num2.find('(')]
num1,num2 = int(num1),int(num2)
strand = hitInfo[hitInfo.find('('):]
# Determine the direction, relative location, and position of the gene
direction = getDirection(hitInfo)
termUpper,termLower = getRelativeLocation(genomePath)
pos = getLocation(num1,termUpper,termLower)
# TODO
# Integrate warning for multiple hits
return num1,direction,pos,hit,e,''
def SearchGenome(queryFile,genomeName,debug):
from Bio import SeqIO
genomePath = 'Files/genome/'+genomeName+'/'+genomeName
genome = openGenome(genomePath)
high,low = getRelativeLocation(genomePath)
gene = SeqIO.read(open(queryFile),'fasta')
geneStr = str(gene.seq)
geneComp = str(gene.seq.reverse_complement())
count = 0
if geneStr in genome:
direction = 'forward'
n = genome.find(geneStr)
pos = getLocation(n,high,low)
count += genome.count(geneStr)
elif geneComp in genome:
direction = 'reverse'
n = genome.find(geneComp)
pos = getLocation(n,high,low)
count += genome.count(geneComp)
else:
return 0,0,'Ambiguous','No Match','N/A',''
    # If the gene sequence is present more than once, issue a warning
    bWarn = ''
    if count > 1:
        bWarn = 'Warning: Gene sequence detected multiple times in genome'
    return n,direction,pos,'No BLAST data','No BLAST data',bWarn
def getRelativeLocation(genomePath):
l,t = getTermRegion(genomePath+'.txt')
buff = 0.05 * l
high = t + buff
low = t - buff
return high,low
def getTermRegion(path):
fd = open(path)
info = fd.read()
l,t = info.split('\n')
l,t = int(l),int(t)
return l,t
def getDirection(line):
if '(+)' in line:
d = 'forward'
elif '(-)' in line:
d = 'reverse'
return d
def getLocation(num,high,low):
if num < low:
p = 'Early'
elif num > high:
p = 'Late'
else:
p = 'Ambiguous'
return p
def openGenome(gpath):
fd = open(gpath+'.fasta')
g = fd.read()
g = g.replace('\n','')
return g
| gmoyerbrailean/PyRec | Files/BLASTing.py | Python | gpl-2.0 | 5,396 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
"""Utilities used for translating operators from Onnx to Mxnet."""
# pylint: disable=protected-access
from __future__ import absolute_import as _abs
from .... import symbol
from .... import module
from .... import context
from .... import ndarray as nd
from .... import io
def _fix_attribute_names(attrs, change_map):
"""
Change attribute names as per values in change_map dictionary.
Parameters
----------
:param attrs : dict Dict of operator attributes
:param change_map : dict Dict of onnx attribute name to mxnet attribute names.
Returns
-------
:return new_attr : dict Converted dict of operator attributes.
"""
new_attr = {}
for k in attrs.keys():
if k in change_map:
new_attr[change_map[k]] = attrs[k]
else:
new_attr[k] = attrs[k]
return new_attr
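# Example (illustrative attribute names):
#   _fix_attribute_names({'kernel_shape': (3, 3), 'strides': (1, 1)},
#                        {'kernel_shape': 'kernel', 'strides': 'stride'})
#   returns {'kernel': (3, 3), 'stride': (1, 1)}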
def _remove_attributes(attrs, remove_list):
"""
Removes attributes in the remove list from the input attribute dict
:param attrs : Dict of operator attributes
:param remove_list : list of attributes to be removed
:return new_attr : Dict of operator attributes without the listed attributes.
"""
new_attrs = {}
for attr in attrs.keys():
if attr not in remove_list:
new_attrs[attr] = attrs[attr]
return new_attrs
def _add_extra_attributes(attrs, extra_attr_map):
"""
:param attrs: Current Attribute list
    :param extra_attr_map: Additional attributes to be added
:return: new_attr
"""
for attr in extra_attr_map:
if attr not in attrs:
attrs[attr] = extra_attr_map[attr]
return attrs
def _pad_sequence_fix(attr, kernel_dim=None):
"""Changing onnx's pads sequence to match with mxnet's pad_width
mxnet: (x1_begin, x1_end, ... , xn_begin, xn_end)
onnx: (x1_begin, x2_begin, ... , xn_end, xn_end)"""
new_attr = ()
if len(attr) % 2 == 0:
for index in range(int(len(attr) / 2)):
new_attr = new_attr + attr[index::int(len(attr) / 2)]
# Making sure pad values are in the attr for all axes.
if kernel_dim is not None:
while len(new_attr) < kernel_dim*2:
new_attr = new_attr + (0, 0)
return new_attr
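# Worked example (illustrative): for a 2-D kernel, an ONNX pads tuple
# (top, left, bottom, right) is reordered into MXNet's per-axis pairs:
#   _pad_sequence_fix((1, 2, 3, 4), kernel_dim=2) -> (1, 3, 2, 4)
# and with kernel_dim=3 a trailing (0, 0) is appended for the missing axis,
# giving (1, 3, 2, 4, 0, 0).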
def _fix_pooling(pool_type, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
stride = new_attr.get('stride')
kernel = new_attr.get('kernel')
padding = new_attr.get('pad')
p_value = new_attr.get('p_value')
# Adding default stride.
if stride is None:
stride = (1,) * len(kernel)
# Add padding attr if not provided.
if padding is None:
padding = (0,) * len(kernel) * 2
# Mxnet Pad operator supports only 4D/5D tensors.
# For 1D case, these are the steps:
# Step 1. Add extra dummy dimension to make it 4D. Adding to axis = 2
# Step 2. Apply padding to this changed tensor
# Step 3. Remove the extra dimension added in step 1.
if len(kernel) == 1:
dummy_axis = 2
# setting 0 padding to the new dim to be added.
padding = (0, padding[0], 0, padding[1])
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, kernel_dim=2)
# Step 1.
curr_sym = symbol.expand_dims(inputs[0], axis=dummy_axis)
# Step 2. Common for all tensor sizes
new_pad_op = symbol.pad(curr_sym, mode='edge', pad_width=pad_width)
# Step 3: Removing extra dim added.
new_pad_op = symbol.split(new_pad_op, axis=dummy_axis, num_outputs=1, squeeze_axis=1)
else:
# For 2D/3D cases:
# Apply padding
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, kernel_dim=len(kernel))
curr_sym = inputs[0]
if pool_type == 'max':
# For max pool : mode = 'edge', we should replicate the
# edge values to pad, so that we only include input data values
# for calculating 'max'
new_pad_op = symbol.pad(curr_sym, mode='edge', pad_width=pad_width)
else:
# For avg pool, we should add 'zeros' for padding so mode='constant'
new_pad_op = symbol.pad(curr_sym, mode='constant', pad_width=pad_width)
# Apply pooling without pads.
if pool_type == 'lp':
new_pooling_op = symbol.Pooling(new_pad_op, pool_type=pool_type, stride=stride, kernel=kernel, p_value=p_value)
else:
new_pooling_op = symbol.Pooling(new_pad_op, pool_type=pool_type, stride=stride, kernel=kernel)
return new_pooling_op
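# Example of the 1-D workaround above (illustrative): with kernel=(3,) and pad=(1, 2),
# the (N, C, W) input is expanded to (N, C, 1, W), padded with
# pad_width=(0, 0, 0, 0, 0, 0, 1, 2), squeezed back to (N, C, W+3), and then pooled
# with the original 1-D kernel.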
def _fix_bias(op_name, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op_name))
return attrs
def _fix_broadcast(op_name, inputs, broadcast_axis, proto_obj):
"""A workaround to reshape bias term to (1, num_channel)."""
if int(len(proto_obj._params)) > 0:
assert len(list(inputs)) == 2
input0_shape = get_input_shape(inputs[0], proto_obj)
#creating reshape shape
reshape_shape = list(len(input0_shape) * (1,))
reshape_shape[broadcast_axis] = -1
reshape_shape = tuple(reshape_shape)
reshape_op_sym = symbol.reshape(inputs[1], shape=reshape_shape)
op_sym = getattr(symbol, op_name)(inputs[0], reshape_op_sym)
else:
op_sym = op_name
return op_sym
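# Example (illustrative): with broadcast_axis=1 and a 4-D NCHW input, a (64,) bias is
# reshaped to (1, -1, 1, 1) before the elementwise op, i.e. roughly
#   symbol.broadcast_add(data, symbol.reshape(bias, shape=(1, -1, 1, 1)))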
def _fix_channels(op_name, attrs, inputs, proto_obj):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
weight_name = inputs[1].name
if not weight_name in proto_obj._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
wshape = proto_obj._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op_name == 'FullyConnected':
attrs['num_hidden'] = wshape[0]
else:
if op_name == 'Convolution':
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op_name == 'Deconvolution':
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
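# Example (illustrative): a Convolution whose ONNX weight tensor has shape (64, 3, 3, 3)
# gets attrs['num_filter'] = 64, while a FullyConnected weight of shape (1000, 512)
# gives attrs['num_hidden'] = 1000.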
def _fix_gemm(op_name, inputs, old_attr, proto_obj):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op_sym = getattr(symbol, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
trans_a = int(old_attr.get('transA', 0))
trans_b = int(old_attr.get('transB', 0))
if trans_a:
inputs[0] = symbol.transpose(inputs[0], axes=(1, 0))
if not trans_b:
inputs[1] = symbol.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : proto_obj._params[inputs[2].name].shape[0]}
return op_sym, new_attr, new_inputs
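# Note on the mapping above (illustrative): ONNX Gemm computes alpha*op(A) @ op(B) + beta*C,
# while FullyConnected computes X @ W.T + b.  Hence A is pre-scaled by alpha, C by beta,
# B is transposed whenever transB is unset, and num_hidden is read from the first
# dimension of C's parameter shape.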
def get_input_shape(sym, proto_obj):
"""Helper function to obtain the shape of an array"""
arg_params = proto_obj.arg_dict
aux_params = proto_obj.aux_dict
model_input_shape = [data[1] for data in proto_obj.model_metadata.get('input_tensor_data')]
data_names = [data[0] for data in proto_obj.model_metadata.get('input_tensor_data')]
# creating dummy inputs
inputs = []
for in_shape in model_input_shape:
inputs.append(nd.ones(shape=in_shape))
data_shapes = []
for idx, input_name in enumerate(data_names):
data_shapes.append((input_name, inputs[idx].shape))
ctx = context.cpu()
# create a module
mod = module.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None)
mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
mod.set_params(arg_params=arg_params, aux_params=aux_params)
data_forward = []
for idx, input_name in enumerate(data_names):
val = inputs[idx]
data_forward.append(val)
mod.forward(io.DataBatch(data_forward))
result = mod.get_outputs()[0].asnumpy()
return result.shape
| reminisce/mxnet | python/mxnet/contrib/onnx/onnx2mx/_translation_utils.py | Python | apache-2.0 | 9,344 |
# encoding: utf-8
import functools
from pytest import raises as assert_raises
from rhino.util import dual_use_decorator, dual_use_decorator_method, \
get_args, sse_event
@dual_use_decorator
def decorator_function(*args, **kw):
"""decorator function"""
decorator_args = (args, kw)
def decorator(fn):
@functools.wraps(fn)
def wrapper(*args, **kw):
return {'func_args': (args, kw),
'decorator_args': decorator_args}
return wrapper
return decorator
class MyClass(object):
@dual_use_decorator_method
def decorator_method(self, *args, **kw):
"""decorator method"""
decorator_args = (args, kw)
def decorator(fn):
@functools.wraps(fn)
def wrapper(*args, **kw):
return {'func_args': (args, kw),
'decorator_args': decorator_args}
return wrapper
return decorator
def test_docstrings():
assert decorator_function.__doc__ == 'decorator function'
assert MyClass.decorator_method.__doc__ == 'decorator method'
def test_dual_use_decorator():
@decorator_function
def foo():
"""foo function"""
pass
assert foo(1, a=2) == {
'func_args': ((1,), {'a': 2}),
'decorator_args': (tuple(), {}),
}
assert foo.__doc__ == 'foo function'
def test_dual_use_decorator_with_args():
@decorator_function(3, b=4)
def foo():
"""foo function"""
pass
assert foo(1, a=2) == {
'func_args': ((1,), {'a': 2}),
'decorator_args': ((3,), {'b': 4}),
}
assert foo.__doc__ == 'foo function'
def test_dual_use_decorator_method():
obj = MyClass()
@obj.decorator_method
def foo():
"""foo function"""
pass
assert foo(1, a=2) == {
'func_args': ((1,), {'a': 2}),
'decorator_args': (tuple(), {}),
}
assert foo.__doc__ == 'foo function'
def test_dual_use_decorator_method_with_args():
obj = MyClass()
@obj.decorator_method(3, b=4)
def foo():
"""foo function"""
pass
assert foo(1, a=2) == {
'func_args': ((1,), {'a': 2}),
'decorator_args': ((3,), {'b': 4}),
}
assert foo.__doc__ == 'foo function'
def test_get_args():
class Foo(object):
def __init__(self, a, b):
pass
def __call__(self, e, f):
pass
def foo(self, g, h):
pass
@classmethod
def bar(cls, i, j):
pass
@staticmethod
def baz(k, l):
pass
foo = Foo(None, None)
assert get_args(lambda x: None) == ['x']
assert get_args(Foo) == ['a', 'b']
assert get_args(Foo.foo) == ['g', 'h']
assert get_args(Foo.bar) == ['i', 'j']
assert get_args(Foo.baz) == ['k', 'l']
assert get_args(foo) == ['e', 'f']
assert get_args(foo.foo) == ['g', 'h']
assert get_args(foo.bar) == ['i', 'j']
assert get_args(foo.baz) == ['k', 'l']
assert_raises(TypeError, get_args, None)
def test_sse_event():
assert sse_event('test', 'foo\nbar') == \
'''\
event: test
data: foo
data: bar

'''
assert sse_event(comment='a\nb\n') == \
'''\
: a
: b
: 

'''
assert sse_event(
event='test', data='foo', id='id', retry=12, comment='hi') == \
'''\
: hi
id: id
event: test
retry: 12
data: foo

'''
def test_sse_event_newlines():
assert sse_event(comment='a\rb\nc\r\nd') == ': a\n: b\n: c\n: d\n\n'
assert sse_event(comment='a\n\n') == ': a\n: \n: \n\n'
assert sse_event(comment='a\r') == ': a\n: \n\n'
def test_sse_event_minimal():
assert sse_event(comment='') == ': \n\n'
def test_sse_event_retry():
assert sse_event(retry=1) == 'retry: 1\n\n'
assert sse_event(retry='1') == 'retry: 1\n\n'
assert_raises(ValueError, sse_event, retry='a')
def test_sse_event_invalid_newlines():
assert_raises(ValueError, sse_event, event='a\n')
assert_raises(ValueError, sse_event, id='a\r')
def test_sse_event_empty():
assert_raises(TypeError, sse_event)
def test_sse_event_unicode():
assert sse_event(comment=u'★') == u': ★\n\n'.encode('utf-8')
| trendels/rhino | test/test_util.py | Python | mit | 4,184 |
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
assert os.path.isfile(os.path.join(BASE_DIR, 'manage.py'))
DEBUG = True
VAR_ROOT = os.path.join(BASE_DIR, 'var')
SECRET_KEY = 'top_secret'
if not os.path.isdir(VAR_ROOT):
print('Creating var root %s' % VAR_ROOT)
os.makedirs(VAR_ROOT)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test_db',
}
}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sites',
'cms',
'menus',
'treebeard',
'cmsplugin_fontawesome_links_ai',
]
LANGUAGES = [
("en-us", "English"),
]
MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
],
},
},
]
ROOT_URLCONF = 'urls'
SITE_ID = 1
STATIC_ROOT = os.path.join(VAR_ROOT, 'static')
MEDIA_ROOT = os.path.join(VAR_ROOT, 'media')
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
WSGI_APPLICATION = 'test_wsgi.application'
| igordavydsson/cmsplugin-fontawesome-links-ai | test_settings.py | Python | mit | 1,405 |
"""Unit tests of MQProducer interface in the DIRAC.Resources.MessageQueue.MProducerQ
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
from DIRAC import S_OK
from DIRAC.Resources.MessageQueue.MQProducer import MQProducer
from DIRAC.Resources.MessageQueue.MQConnectionManager import MQConnectionManager
from DIRAC.Resources.MessageQueue.MQConnector import MQConnector
class FakeMQConnector(MQConnector):
def __init__(self, params={}):
super(FakeMQConnector, self).__init__()
def disconnect(self):
return S_OK("FakeMQConnection disconnecting")
def get(self, destination=''):
return "FakeMQConnection getting message"
def put(self, message, parameters=None):
return S_OK("FakeMQConnection sending message: " + str(message))
class TestMQProducer(unittest.TestCase):
def setUp(self):
self.maxDiff = None # To show full difference between structures in case of error
dest = {}
dest.update({'/queue/FakeQueue': ['producer4', 'producer2']})
dest4 = {'/queue/test3': ['producer1', 'consumer2', 'consumer3', 'consumer4']}
conn1 = {'MQConnector': FakeMQConnector(), 'destinations': dest}
conn2 = {'MQConnector': FakeMQConnector(), 'destinations': dest4}
storage = {'fake.cern.ch': conn1, 'testdir.blabla.ch': conn2}
self.myManager = MQConnectionManager(connectionStorage=storage)
def tearDown(self):
pass
class TestMQProducer_put(TestMQProducer):
def test_success(self):
producer = MQProducer(mqManager=self.myManager, mqURI="fake.cern.ch::Queues::FakeQueue", producerId='producer4')
result = producer.put("wow!")
self.assertTrue(result['OK'])
self.assertEqual(result['Value'], "FakeMQConnection sending message: wow!")
def test_failure(self):
producer = MQProducer(mqManager=self.myManager, mqURI="bad.cern.ch::Queues::FakeQueue", producerId='producer4')
result = producer.put("wow!")
self.assertFalse(result['OK'])
class TestMQProducer_close(TestMQProducer):
def test_success(self):
producer = MQProducer(mqManager=self.myManager, mqURI="fake.cern.ch::Queues::FakeQueue", producerId='producer4')
result = producer.close()
self.assertTrue(result['OK'])
    # producer is still able to send because the connection is still active (producer2 is connected)
result = producer.put("wow!")
self.assertTrue(result['OK'])
def test_failure(self):
producer = MQProducer(mqManager=self.myManager, mqURI="fake.cern.ch::Queues::FakeQueue", producerId='producer4')
result = producer.close()
self.assertTrue(result['OK'])
result = producer.close()
self.assertFalse(result['OK'])
self.assertEqual(
result['Message'],
'MQ connection failure ( 1142 : Failed to stop the connection!The messenger producer4 does not exist!)')
def test_failure2(self):
producer = MQProducer(mqManager=self.myManager, mqURI="fake.cern.ch::Queues::FakeQueue", producerId='producer4')
producer2 = MQProducer(mqManager=self.myManager, mqURI="fake.cern.ch::Queues::FakeQueue", producerId='producer2')
result = producer.close()
self.assertTrue(result['OK'])
result = producer.close()
self.assertFalse(result['OK'])
self.assertEqual(
result['Message'],
'MQ connection failure ( 1142 : Failed to stop the connection!The messenger producer4 does not exist!)')
result = producer2.close()
self.assertTrue(result['OK'])
result = producer2.close()
self.assertFalse(result['OK'])
self.assertEqual(
result['Message'],
'MQ connection failure ( 1142 : Failed to stop the connection!The messenger producer2 does not exist!)')
# connection does not exist so put will not work
result = producer.put("wow!")
self.assertFalse(result['OK'])
self.assertEqual(result['Message'], 'Failed to get the MQConnector!')
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestMQProducer)
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestMQProducer_put))
suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(TestMQProducer_close))
testResult = unittest.TextTestRunner(verbosity=2).run(suite)
| yujikato/DIRAC | src/DIRAC/Resources/MessageQueue/test/Test_MQProducer.py | Python | gpl-3.0 | 4,234 |
if rand():
from complex_pkg.modulea import Foo as Alias
Alias() ## type Foo
else:
from complex_pkg.moduleb import Bar as Alias
Alias() ## type Bar
x = Alias()
x ## type Bar|Foo
x.method(5.5) # type int|str
if random():
import complex_pkg.modulea as module
else:
module = 1
module ## type int|modulea
| aptana/Pydev | tests/org.python.pydev.refactoring.tests/src/python/typeinference/complex.py | Python | epl-1.0 | 333 |
#!/usr/bin/env python
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""rebase.py: standalone script to batch update bench expectations.
Requires gsutil to access gs://chromium-skia-gm and Rietveld credentials.
Usage:
Copy script to a separate dir outside Skia repo. The script will create a
skia dir on the first run to host the repo, and will create/delete
temp dirs as needed.
./rebase.py --githash <githash prefix to use for getting bench data>
"""
import argparse
import filecmp
import os
import re
import shutil
import subprocess
import time
import urllib2
# googlesource url that has most recent Skia git hash info.
SKIA_GIT_HEAD_URL = 'https://skia.googlesource.com/skia/+log/HEAD'
# Google Storage bench file prefix.
GS_PREFIX = 'gs://chromium-skia-gm/perfdata'
# Regular expression for matching githash data.
HA_RE = '<a href="/skia/\+/([0-9a-f]+)">'
HA_RE_COMPILED = re.compile(HA_RE)
def get_git_hashes():
print 'Getting recent git hashes...'
hashes = HA_RE_COMPILED.findall(
urllib2.urlopen(SKIA_GIT_HEAD_URL).read())
return hashes
def filter_file(f):
if f.find('_msaa') > 0 or f.find('_record') > 0:
return True
return False
def clean_dir(d):
if os.path.exists(d):
shutil.rmtree(d)
os.makedirs(d)
def get_gs_filelist(p, h):
print 'Looking up for the closest bench files in Google Storage...'
proc = subprocess.Popen(['gsutil', 'ls',
'/'.join([GS_PREFIX, p, 'bench_' + h + '_data_skp_*'])],
stdout=subprocess.PIPE)
out, err = proc.communicate()
if err or not out:
return []
return [i for i in out.strip().split('\n') if not filter_file(i)]
def download_gs_files(p, h, gs_dir):
print 'Downloading raw bench files from Google Storage...'
proc = subprocess.Popen(['gsutil', 'cp',
'/'.join([GS_PREFIX, p, 'bench_' + h + '_data_skp_*']),
'%s/%s' % (gs_dir, p)],
stdout=subprocess.PIPE)
out, err = proc.communicate()
if err:
clean_dir(gs_dir)
return False
files = 0
for f in os.listdir(os.path.join(gs_dir, p)):
if filter_file(f):
os.remove(os.path.join(gs_dir, p, f))
else:
files += 1
if files:
return True
return False
def get_expectations_dict(f):
"""Given an expectations file f, returns a dictionary of data."""
# maps row_key to (expected, lower_bound, upper_bound) float tuple.
dic = {}
for l in open(f).readlines():
line_parts = l.strip().split(',')
if line_parts[0].startswith('#') or len(line_parts) != 5:
continue
dic[','.join(line_parts[:2])] = (float(line_parts[2]), float(line_parts[3]),
float(line_parts[4]))
return dic
def calc_expectations(p, h, gs_dir, exp_dir, repo_dir, extra_dir, extra_hash):
exp_filename = 'bench_expectations_%s.txt' % p
exp_fullname = os.path.join(exp_dir, exp_filename)
proc = subprocess.Popen(['python', 'skia/bench/gen_bench_expectations.py',
'-r', h, '-b', p, '-d', os.path.join(gs_dir, p), '-o', exp_fullname],
stdout=subprocess.PIPE)
out, err = proc.communicate()
if err:
print 'ERR_CALCULATING_EXPECTATIONS: ' + err
return False
print 'CALCULATED_EXPECTATIONS: ' + out
if extra_dir: # Adjust data with the ones in extra_dir
print 'USE_EXTRA_DATA_FOR_ADJUSTMENT.'
proc = subprocess.Popen(['python', 'skia/bench/gen_bench_expectations.py',
'-r', extra_hash, '-b', p, '-d', os.path.join(extra_dir, p), '-o',
os.path.join(extra_dir, exp_filename)],
stdout=subprocess.PIPE)
out, err = proc.communicate()
if err:
print 'ERR_CALCULATING_EXTRA_EXPECTATIONS: ' + err
return False
extra_dic = get_expectations_dict(os.path.join(extra_dir, exp_filename))
output_lines = []
for l in open(exp_fullname).readlines():
parts = l.strip().split(',')
if parts[0].startswith('#') or len(parts) != 5:
output_lines.append(l.strip())
continue
key = ','.join(parts[:2])
if key in extra_dic:
exp, lb, ub = (float(parts[2]), float(parts[3]), float(parts[4]))
alt, _, _ = extra_dic[key]
avg = (exp + alt) / 2
# Keeps the extra range in lower/upper bounds from two actual values.
new_lb = min(exp, alt) - (exp - lb)
new_ub = max(exp, alt) + (ub - exp)
output_lines.append('%s,%.2f,%.2f,%.2f' % (key, avg, new_lb, new_ub))
else:
output_lines.append(l.strip())
with open(exp_fullname, 'w') as f:
f.write('\n'.join(output_lines))
repo_file = os.path.join(repo_dir, 'expectations', 'bench', exp_filename)
if (os.path.isfile(repo_file) and
filecmp.cmp(repo_file, os.path.join(exp_dir, exp_filename))):
print 'NO CHANGE ON %s' % repo_file
return False
return True
def checkout_or_update_skia(repo_dir):
status = True
old_cwd = os.getcwd()
os.chdir(repo_dir)
print 'CHECK SKIA REPO...'
if subprocess.call(['git', 'pull'],
stderr=subprocess.PIPE):
print 'Checking out Skia from git, please be patient...'
os.chdir(old_cwd)
clean_dir(repo_dir)
os.chdir(repo_dir)
if subprocess.call(['git', 'clone', '-q', '--depth=50', '--single-branch',
'https://skia.googlesource.com/skia.git', '.']):
status = False
subprocess.call(['git', 'checkout', 'master'])
subprocess.call(['git', 'pull'])
os.chdir(old_cwd)
return status
def git_commit_expectations(repo_dir, exp_dir, update_li, h, commit,
extra_hash):
if extra_hash:
extra_hash = ', adjusted with ' + extra_hash
commit_msg = """manual bench rebase after %s%s
[email protected]
Bypassing trybots:
NOTRY=true""" % (h, extra_hash)
old_cwd = os.getcwd()
os.chdir(repo_dir)
upload = ['git', 'cl', 'upload', '-f', '--bypass-hooks',
'--bypass-watchlists', '-m', commit_msg]
branch = exp_dir.split('/')[-1]
if commit:
upload.append('--use-commit-queue')
cmds = ([['git', 'checkout', 'master'],
['git', 'pull'],
['git', 'checkout', '-b', branch, '-t', 'origin/master']] +
[['cp', '%s/%s' % (exp_dir, f), 'expectations/bench'] for f in
update_li] +
[['git', 'add'] + ['expectations/bench/%s' % i for i in update_li],
['git', 'commit', '-m', commit_msg],
upload,
['git', 'checkout', 'master'],
['git', 'branch', '-D', branch],
])
status = True
for cmd in cmds:
print 'Running ' + ' '.join(cmd)
if subprocess.call(cmd):
print 'FAILED. Please check if skia git repo is present.'
subprocess.call(['git', 'checkout', 'master'])
status = False
break
os.chdir(old_cwd)
return status
def delete_dirs(li):
for d in li:
print 'Deleting directory %s' % d
shutil.rmtree(d)
def main():
d = os.path.dirname(os.path.abspath(__file__))
os.chdir(d)
if not subprocess.call(['git', 'rev-parse'], stderr=subprocess.PIPE):
print 'Please copy script to a separate dir outside git repos to use.'
return
parser = argparse.ArgumentParser()
parser.add_argument('--githash',
help=('Githash prefix (7+ chars) to rebaseline to. If '
'a second one is supplied after comma, and it has '
'corresponding bench data, will shift the range '
'center to the average of two expected values.'))
parser.add_argument('--bots',
help=('Comma-separated list of bots to work on. If no '
'matching bots are found in the list, will default '
'to processing all bots.'))
parser.add_argument('--commit', action='store_true',
help='Whether to commit changes automatically.')
args = parser.parse_args()
repo_dir = os.path.join(d, 'skia')
if not os.path.exists(repo_dir):
os.makedirs(repo_dir)
if not checkout_or_update_skia(repo_dir):
print 'ERROR setting up Skia repo at %s' % repo_dir
return 1
file_in_repo = os.path.join(d, 'skia/experimental/benchtools/rebase.py')
if not filecmp.cmp(__file__, file_in_repo):
shutil.copy(file_in_repo, __file__)
print 'Updated this script from repo; please run again.'
return
all_platforms = [] # Find existing list of platforms with expectations.
for item in os.listdir(os.path.join(d, 'skia/expectations/bench')):
all_platforms.append(
item.replace('bench_expectations_', '').replace('.txt', ''))
platforms = []
# If at least one given bot is in all_platforms, use list of valid args.bots.
if args.bots:
bots = args.bots.strip().split(',')
for bot in bots:
if bot in all_platforms: # Filters platforms with given bot list.
platforms.append(bot)
if not platforms: # Include all existing platforms with expectations.
platforms = all_platforms
if not args.githash or len(args.githash) < 7:
raise Exception('Please provide --githash with a longer prefix (7+).')
githashes = args.githash.strip().split(',')
if len(githashes[0]) < 7:
raise Exception('Please provide --githash with longer prefixes (7+).')
commit = False
if args.commit:
commit = True
rebase_hash = githashes[0][:7]
extra_hash = ''
if len(githashes) == 2:
extra_hash = githashes[1][:7]
hashes = get_git_hashes()
short_hashes = [h[:7] for h in hashes]
if (rebase_hash not in short_hashes or
(extra_hash and extra_hash not in short_hashes) or
rebase_hash == extra_hash):
raise Exception('Provided --githashes not found, or identical!')
if extra_hash:
extra_hash = hashes[short_hashes.index(extra_hash)]
hashes = hashes[:short_hashes.index(rebase_hash) + 1]
update_li = []
ts_str = '%s' % time.time()
gs_dir = os.path.join(d, 'gs' + ts_str)
exp_dir = os.path.join(d, 'exp' + ts_str)
extra_dir = os.path.join(d, 'extra' + ts_str)
clean_dir(gs_dir)
clean_dir(exp_dir)
clean_dir(extra_dir)
for p in platforms:
clean_dir(os.path.join(gs_dir, p))
clean_dir(os.path.join(extra_dir, p))
hash_to_use = ''
for h in reversed(hashes):
li = get_gs_filelist(p, h)
if not len(li): # no data
continue
if download_gs_files(p, h, gs_dir):
print 'Copied %s/%s' % (p, h)
hash_to_use = h
break
else:
print 'DOWNLOAD BENCH FAILED %s/%s' % (p, h)
break
if hash_to_use:
if extra_hash and download_gs_files(p, extra_hash, extra_dir):
print 'Copied extra data %s/%s' % (p, extra_hash)
if calc_expectations(p, h, gs_dir, exp_dir, repo_dir, extra_dir,
extra_hash):
update_li.append('bench_expectations_%s.txt' % p)
elif calc_expectations(p, h, gs_dir, exp_dir, repo_dir, '', ''):
update_li.append('bench_expectations_%s.txt' % p)
if not update_li:
print 'No bench data to update after %s!' % args.githash
elif not git_commit_expectations(
repo_dir, exp_dir, update_li, rebase_hash, commit, extra_hash):
print 'ERROR uploading expectations using git.'
elif not commit:
print 'CL created. Please take a look at the link above.'
else:
print 'New bench baselines should be in CQ now.'
delete_dirs([gs_dir, exp_dir, extra_dir])
if __name__ == "__main__":
main()
| mydongistiny/external_chromium_org_third_party_skia | experimental/benchtools/rebase.py | Python | bsd-3-clause | 11,503 |
import time
import subprocess
import select
from logcmd.defaults import DefaultTemplate
from logcmd.procmanager import ProcManager
class IOManager (object):
def __init__(self,
outstream,
tmpl=DefaultTemplate,
params=None,
_select=select.select,
_popen=subprocess.Popen,
_gettime=time.gmtime):
self._outstream = outstream
self._tmpl = tmpl
self._params = params
self._select = _select
self._popen = _popen
self._gettime = _gettime
self._readables = {}
self._pms = set()
self._exitstatus = 0
def launch(self, args):
pman = ProcManager(
self._outstream,
args,
self._tmpl,
self._params,
_popen=self._popen,
_gettime=self._gettime,
)
self._pms.add(pman)
for f in pman.readables:
self._readables[f] = pman
def mainloop(self):
while len(self._pms) > 0 or len(self._readables) > 0:
self._filter_closed_processes()
self._handle_io()
return self._exitstatus
def _filter_closed_processes(self):
for pm in list(self._pms):
rc = pm.check_closed()
if rc is None:
continue
else:
self._pms.remove(pm)
if rc != 0:
self._exitstatus = 1
def _handle_io(self):
if len(self._readables) > 0:
(rds, wds, xds) = self._select(self._readables.keys(), [], [])
assert (wds, xds) == ([], []), repr((rds, wds, xds))
for rd in rds:
if not self._readables[rd].handle_read(rd):
del self._readables[rd]
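# Illustrative usage (module path taken from this package; commands are examples only):
#
#     import sys
#     from logcmd.iomanager import IOManager
#
#     iom = IOManager(sys.stdout)
#     iom.launch(['ping', '-c', '3', 'example.org'])
#     iom.launch(['ping', '-c', '3', 'example.net'])
#     sys.exit(iom.mainloop())   # multiplexes both outputs via select() until exit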
| nejucomo/logcmd | logcmd/iomanager.py | Python | gpl-3.0 | 1,824 |
"""Tests for syncthing config flow."""
from unittest.mock import patch
from aiosyncthing.exceptions import UnauthorizedError
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.syncthing.const import DOMAIN
from homeassistant.const import CONF_NAME, CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL
from tests.common import MockConfigEntry
NAME = "Syncthing"
URL = "http://127.0.0.1:8384"
TOKEN = "token"
VERIFY_SSL = True
MOCK_ENTRY = {
CONF_NAME: NAME,
CONF_URL: URL,
CONF_TOKEN: TOKEN,
CONF_VERIFY_SSL: VERIFY_SSL,
}
async def test_show_setup_form(hass):
"""Test that the setup form is served."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {}
assert result["step_id"] == "user"
async def test_flow_successful(hass):
"""Test with required fields only."""
with patch(
"aiosyncthing.system.System.status", return_value={"myID": "server-id"}
), patch(
"homeassistant.components.syncthing.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "user"},
data={
CONF_NAME: NAME,
CONF_URL: URL,
CONF_TOKEN: TOKEN,
CONF_VERIFY_SSL: VERIFY_SSL,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "http://127.0.0.1:8384"
assert result["data"][CONF_NAME] == NAME
assert result["data"][CONF_URL] == URL
assert result["data"][CONF_TOKEN] == TOKEN
assert result["data"][CONF_VERIFY_SSL] == VERIFY_SSL
assert len(mock_setup_entry.mock_calls) == 1
async def test_flow_already_configured(hass):
"""Test name is already configured."""
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRY, unique_id="server-id")
entry.add_to_hass(hass)
with patch("aiosyncthing.system.System.status", return_value={"myID": "server-id"}):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "user"},
data=MOCK_ENTRY,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_invalid_auth(hass):
"""Test invalid auth."""
with patch("aiosyncthing.system.System.status", side_effect=UnauthorizedError):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "user"},
data=MOCK_ENTRY,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["token"] == "invalid_auth"
async def test_flow_cannot_connect(hass):
"""Test cannot connect."""
with patch("aiosyncthing.system.System.status", side_effect=Exception):
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": "user"},
data=MOCK_ENTRY,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"]["base"] == "cannot_connect"
| jawilson/home-assistant | tests/components/syncthing/test_config_flow.py | Python | apache-2.0 | 3,372 |
# -*- encoding: utf-8 -*-
from SRAObjects import *
from Read import *
| polarise/breeze | assets/__init__.py | Python | gpl-2.0 | 70 |
# -*- coding: utf-8 -*-
#
# EditQuality documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import alabaster
import editquality
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
print(dir_path)
sys.path.insert(0, dir_path)
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'
html_theme_path = [alabaster.get_path()]
extensions = ['alabaster']
html_theme = 'alabaster'
github_button = True
github_user = "wikimedia"
github_repo = "editquality"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autosummary',
'sphinx.ext.extlinks',
'm2r',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'editquality'
copyright = u'2019, Scoring Platform team'
author = u'Aaron Halfaker'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = editquality.__version__
# The full version, including alpha/beta/rc tags.
release = editquality.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
extlinks = {
'issue': ('https://github.com/wikimedia/editquality/issues/%s', '#'),
'pr': ('https://github.com/wikimedia/editquality/pull/%s', 'PR #'),
}
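# Usage note (illustrative): with the shortcuts above, reST such as :issue:`123` renders
# as a link labelled "#123" to https://github.com/wikimedia/editquality/issues/123, and
# :pr:`45` as "PR #45" pointing at the corresponding pull request.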
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'EditQualitydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'EditQuality.tex', u'Edit Quality Documentation',
u'Scoring Platform team', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'editquality', u'Edit Quality Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Edit Quality', u'Edit Quality Documentation',
author, 'EditQuality', 'A supervised learning approach to determining the quality of edits in Wikipedia.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/3/': None,
'https://revscoring.readthedocs.io/en/latest/': None}
| wiki-ai/editquality | docs/conf.py | Python | mit | 10,170 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
'''
TZlibTransport provides a compressed transport and transport factory
class, using the python standard library zlib module to implement
data compression.
'''
from __future__ import division
import zlib
from cStringIO import StringIO
from TTransport import TTransportBase, CReadableTransport
class TZlibTransportFactory(object):
'''
Factory transport that builds zlib compressed transports.
This factory caches the last single client/transport that it was passed
and returns the same TZlibTransport object that was created.
This caching means the TServer class will get the _same_ transport
object for both input and output transports from this factory.
(For non-threaded scenarios only, since the cache only holds one object)
The purpose of this caching is to allocate only one TZlibTransport where
only one is really needed (since it must have separate read/write buffers),
and makes the statistics from getCompSavings() and getCompRatio()
easier to understand.
'''
# class scoped cache of last transport given and zlibtransport returned
_last_trans = None
_last_z = None
def getTransport(self, trans, compresslevel=9):
        '''Wrap a transport, trans, with the TZlibTransport
compressed transport class, returning a new
transport to the caller.
@param compresslevel: The zlib compression level, ranging
from 0 (no compression) to 9 (best compression). Defaults to 9.
@type compresslevel: int
This method returns a TZlibTransport which wraps the
passed C{trans} TTransport derived instance.
'''
if trans == self._last_trans:
return self._last_z
ztrans = TZlibTransport(trans, compresslevel)
self._last_trans = trans
self._last_z = ztrans
return ztrans
class TZlibTransport(TTransportBase, CReadableTransport):
'''
Class that wraps a transport with zlib, compressing writes
    and decompressing reads, using the Python standard
library zlib module.
'''
# Read buffer size for the python fastbinary C extension,
# the TBinaryProtocolAccelerated class.
DEFAULT_BUFFSIZE = 4096
def __init__(self, trans, compresslevel=9):
'''
Create a new TZlibTransport, wrapping C{trans}, another
TTransport derived object.
        @param trans: A thrift transport object, e.g. a TSocket() object.
@type trans: TTransport
@param compresslevel: The zlib compression level, ranging
from 0 (no compression) to 9 (best compression). Default is 9.
@type compresslevel: int
'''
self.__trans = trans
self.compresslevel = compresslevel
self.__rbuf = StringIO()
self.__wbuf = StringIO()
self._init_zlib()
self._init_stats()
def _reinit_buffers(self):
'''
Internal method to initialize/reset the internal StringIO objects
for read and write buffers.
'''
self.__rbuf = StringIO()
self.__wbuf = StringIO()
def _init_stats(self):
'''
Internal method to reset the internal statistics counters
for compression ratios and bandwidth savings.
'''
self.bytes_in = 0
self.bytes_out = 0
self.bytes_in_comp = 0
self.bytes_out_comp = 0
def _init_zlib(self):
'''
Internal method for setting up the zlib compression and
decompression objects.
'''
self._zcomp_read = zlib.decompressobj()
self._zcomp_write = zlib.compressobj(self.compresslevel)
def getCompRatio(self):
'''
Get the current measured compression ratios (in,out) from
this transport.
Returns a tuple of:
(inbound_compression_ratio, outbound_compression_ratio)
The compression ratios are computed as:
compressed / uncompressed
E.g., data that compresses by 10x will have a ratio of: 0.10
        and data that compresses to half of its original size will
have a ratio of 0.5
None is returned if no bytes have yet been processed in
a particular direction.
'''
r_percent, w_percent = (None, None)
if self.bytes_in > 0:
r_percent = self.bytes_in_comp / self.bytes_in
if self.bytes_out > 0:
w_percent = self.bytes_out_comp / self.bytes_out
return (r_percent, w_percent)
def getCompSavings(self):
'''
Get the current count of saved bytes due to data
compression.
Returns a tuple of:
(inbound_saved_bytes, outbound_saved_bytes)
Note: if compression is actually expanding your
data (only likely with very tiny thrift objects), then
the values returned will be negative.
'''
r_saved = self.bytes_in - self.bytes_in_comp
w_saved = self.bytes_out - self.bytes_out_comp
return (r_saved, w_saved)
def isOpen(self):
'''Return the underlying transport's open status'''
return self.__trans.isOpen()
def open(self):
"""Open the underlying transport"""
self._init_stats()
return self.__trans.open()
def listen(self):
'''Invoke the underlying transport's listen() method'''
self.__trans.listen()
def accept(self):
'''Accept connections on the underlying transport'''
return self.__trans.accept()
def close(self):
        '''Close the underlying transport.'''
self._reinit_buffers()
self._init_zlib()
return self.__trans.close()
def read(self, sz):
'''
Read up to sz bytes from the decompressed bytes buffer, and
read from the underlying transport if the decompression
buffer is empty.
'''
ret = self.__rbuf.read(sz)
if len(ret) > 0:
return ret
# keep reading from transport until something comes back
while True:
if self.readComp(sz):
break
ret = self.__rbuf.read(sz)
return ret
def readComp(self, sz):
'''
Read compressed data from the underlying transport, then
decompress it and append it to the internal StringIO read buffer
'''
zbuf = self.__trans.read(sz)
zbuf = self._zcomp_read.unconsumed_tail + zbuf
buf = self._zcomp_read.decompress(zbuf)
self.bytes_in += len(zbuf)
self.bytes_in_comp += len(buf)
old = self.__rbuf.read()
self.__rbuf = StringIO(old + buf)
if len(old) + len(buf) == 0:
return False
return True
def write(self, buf):
'''
Write some bytes, putting them into the internal write
buffer for eventual compression.
'''
self.__wbuf.write(buf)
def flush(self):
'''
Flush any queued up data in the write buffer and ensure the
compression buffer is flushed out to the underlying transport
'''
wout = self.__wbuf.getvalue()
if len(wout) > 0:
zbuf = self._zcomp_write.compress(wout)
self.bytes_out += len(wout)
self.bytes_out_comp += len(zbuf)
else:
zbuf = ''
ztail = self._zcomp_write.flush(zlib.Z_SYNC_FLUSH)
self.bytes_out_comp += len(ztail)
if (len(zbuf) + len(ztail)) > 0:
self.__wbuf = StringIO()
self.__trans.write(zbuf + ztail)
self.__trans.flush()
@property
def cstringio_buf(self):
'''Implement the CReadableTransport interface'''
return self.__rbuf
def cstringio_refill(self, partialread, reqlen):
'''Implement the CReadableTransport interface for refill'''
retstring = partialread
if reqlen < self.DEFAULT_BUFFSIZE:
retstring += self.read(self.DEFAULT_BUFFSIZE)
while len(retstring) < reqlen:
retstring += self.read(reqlen - len(retstring))
self.__rbuf = StringIO(retstring)
return self.__rbuf
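# Illustrative usage sketch (editor addition, not part of the original module).
# It assumes a Thrift server listening on localhost:9090 and the same Python 2
# package layout implied by the relative imports above:
#
#   from TSocket import TSocket
#
#   factory = TZlibTransportFactory()
#   transport = factory.getTransport(TSocket('localhost', 9090), compresslevel=6)
#   transport.open()
#   transport.write('hello compressed world')
#   transport.flush()                  # compresses the buffered bytes and sends them
#   print transport.getCompRatio()     # (inbound_ratio, outbound_ratio)
#   transport.close()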
| YakindanEgitim/EN-LinuxClipper | thrift/transport/TZlibTransport.py | Python | gpl-3.0 | 8,187 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This is your project's main settings file that can be committed to your
# repo. If you need to override a setting locally, use settings_local.py
from funfactory.settings_base import *
import sys
sys.path.append("/home/minion/minion/plugins/")
sys.path.append("/home/minion/minion/task-engine/")
# Name of the top-level module where you put all your apps.
# If you did not install Playdoh with the funfactory installer script
# you may need to edit this value. See the docs about installing from a
# clone.
PROJECT_MODULE = 'project'
# Defines the views served for root URLs.
ROOT_URLCONF = '%s.urls' % PROJECT_MODULE
INSTALLED_APPS = list(INSTALLED_APPS) + [
# Application base, containing global templates.
'%s.base' % PROJECT_MODULE,
# Main scanner application
'%s.scanner' % PROJECT_MODULE,
#Session Management
'django.contrib.sessions'
]
LOCALE_PATHS = (
os.path.join(ROOT, PROJECT_MODULE, 'locale'),
)
# Because Jinja2 is the default template loader, add any non-Jinja templated
# apps here:
JINGO_EXCLUDE_APPS = [
'admin',
'registration',
]
# BrowserID configuration
AUTHENTICATION_BACKENDS = [
'django_browserid.auth.BrowserIDBackend',
'django.contrib.auth.backends.ModelBackend',
]
SITE_URL = ''
LOGIN_URL = '/'
LOGIN_REDIRECT_URL = 'scanner.home'
LOGIN_REDIRECT_URL_FAILURE = 'scanner.home'
TEMPLATE_CONTEXT_PROCESSORS = list(TEMPLATE_CONTEXT_PROCESSORS) + [
'django_browserid.context_processors.browserid_form',
]
# The path to the function that handles the creation of users, as well as the allowed domains for signup (edit in local settings)
BROWSERID_CREATE_USER = 'project.scanner.util.create_user'
ACCEPTED_USER_DOMAINS = [
]
# Always generate a CSRF token for anonymous users.
ANON_ALWAYS = True
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
DOMAIN_METHODS['messages'] = [
('%s/**.py' % PROJECT_MODULE,
'tower.management.commands.extract.extract_tower_python'),
('%s/**/templates/**.html' % PROJECT_MODULE,
'tower.management.commands.extract.extract_tower_template'),
('templates/**.html',
'tower.management.commands.extract.extract_tower_template'),
]
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
# ('**/templates/**.lhtml',
# 'tower.management.commands.extract.extract_tower_template'),
# ]
# # Use this if you have localizable JS files:
# DOMAIN_METHODS['javascript'] = [
# # Make sure that this won't pull in strings from external libraries you
# # may use.
# ('media/js/**.js', 'javascript'),
# ]
LOGGING = dict(loggers=dict(playdoh = {'level': logging.DEBUG}))
| mozilla/minion-frontend-old | project/settings/base.py | Python | bsd-3-clause | 2,925 |
# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Provide accessors to enhance interoperability between Pandas and MetPy."""
import functools
import pandas as pd
__all__ = []
def preprocess_pandas(func):
"""Decorate a function to convert all data series arguments to `np.ndarray`."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
# not using hasattr(a, values) because it picks up dict.values()
# and this is more explicitly handling pandas
args = tuple(a.values if isinstance(a, pd.Series) else a for a in args)
kwargs = {name: (v.values if isinstance(v, pd.Series) else v)
for name, v in kwargs.items()}
return func(*args, **kwargs)
return wrapper
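if __name__ == '__main__':
    # Minimal demonstration sketch (editor addition): `_column_mean` is an
    # illustrative name only and not part of MetPy's public API. Requires
    # numpy, which MetPy already depends on.
    import numpy as np

    @preprocess_pandas
    def _column_mean(values):
        # The decorator has already converted the pandas Series to an ndarray.
        assert isinstance(values, np.ndarray)
        return np.mean(values)

    print(_column_mean(pd.Series([1.0, 2.0, 3.0])))  # prints 2.0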
| Unidata/MetPy | src/metpy/pandas.py | Python | bsd-3-clause | 826 |
import time
import statsd
class PipedStatsd(statsd.DogStatsd):
def time_deferred(self, d, metric, tags, sample_rate=1):
def _time_and_passthrough(result, start):
self.timing(metric, time.time() - start, tags=tags, sample_rate=sample_rate)
return result
d.addBoth(_time_and_passthrough, time.time())
return d
class StatsdManager(object):
""" Returns `PipedStatsd`-clients as attributes.
    Assuming statsd-configurations under "statsd" map logical names to `PipedStatsd`-configurations, which
    currently only take "host" and "port" as options, the manager will return `PipedStatsd`-clients with the
    corresponding configuration.
    The clients are accessible as manager_instance.logical_name.
    If a client does not have a configuration, it defaults to `host="localhost"` and `port=8125`.
    Also provides the statsd functions of the "default" client directly, i.e. `manager_instance.gauge(...)` will
use the "default"-client, however it is configured.
"""
_statsd_keywords = set(["decrement", "gauge", "histogram", "increment", "set", "timed", "time_deferred", "timing"])
def __init__(self):
self._client_by_name = dict()
def configure(self, runtime_environment):
self.runtime_environment = runtime_environment
self.default_config = runtime_environment.get_configuration_value('statsd.default', dict(host='localhost', port=8125))
self.default = PipedStatsd(**self.default_config)
def __getattr__(self, item):
if item in self._statsd_keywords:
return getattr(self.default, item)
if item not in self._client_by_name:
config = self.runtime_environment.get_configuration_value('statsd.{0}'.format(item), Ellipsis)
if config is not Ellipsis:
self._client_by_name[item] = PipedStatsd(**config)
else:
self._client_by_name[item] = self.default
return self._client_by_name[item]
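# Illustrative usage sketch (editor addition, not part of the original module).
# It assumes a statsd/DogStatsD daemon listening on localhost:8125; the metric
# names and tags below are made up:
#
#   client = PipedStatsd(host='localhost', port=8125)
#   client.increment('requests.handled', tags=['handler:web'])
#
#   from twisted.internet import defer
#   d = defer.Deferred()
#   client.time_deferred(d, 'task.duration', tags=['worker:1'])
#   d.callback(None)   # the timing metric is emitted when the deferred fires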
| alexbrasetvik/Piped | contrib/statsd/piped_statsd/client.py | Python | mit | 2,015 |
import pickle
import pandas as pd
# Scikit-learn method to split the dataset into train and test sets
from sklearn.cross_validation import train_test_split
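# Note: sklearn.cross_validation was removed in scikit-learn 0.20; on newer
# versions the equivalent import would be
# `from sklearn.model_selection import train_test_split`.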
# Scikit-learn method to implement the decision tree classifier
from sklearn.tree import DecisionTreeClassifier
# Load the dataset
balance_scale_data = pd.read_csv('balancescale.data', sep=',', header=None)
print("Dataset Length:: ", len(balance_scale_data))
print("Dataset Shape:: ", balance_scale_data.shape)
# Split the dataset into train and test sets
X = balance_scale_data.values[:, 1:5]
Y = balance_scale_data.values[:, 0]
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.3, random_state=100)
# Decision tree model with the Gini index criterion
decision_tree_model = DecisionTreeClassifier(criterion="gini", random_state=100, max_depth=3, min_samples_leaf=5)
decision_tree_model.fit(X_train, y_train)
print("Decision Tree classifier :: ", decision_tree_model)
print("prediction: ", decision_tree_model.predict([1,1,3,4]))
# Dump the trained decision tree classifier with Pickle
decision_tree_pkl_filename = 'python_balancescale.pkl'
# Open the file to save as pkl file
decision_tree_model_pkl = open(decision_tree_pkl_filename, 'wb')
pickle.dump(decision_tree_model, decision_tree_model_pkl)
# Close the pickle instances
decision_tree_model_pkl.close()
# Loading the saved decision tree model pickle
decision_tree_model_pkl = open(decision_tree_pkl_filename, 'rb')
decision_tree_model = pickle.load(decision_tree_model_pkl)
print("Loaded Decision tree model :: ", decision_tree_model)
print("prediction: ", decision_tree_model.predict([[1,1,3,4]]))
decision_tree_model_pkl.close()
| shareactorIO/pipeline | source.ml/prediction.ml/python/store/default/python_balancescale/1/train_balancescale.py | Python | apache-2.0 | 1,668 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import JsonResponse
from django.http import HttpResponseRedirect
from django.http import Http404, HttpResponse
from django.shortcuts import redirect
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
from django import forms
from django.contrib.auth.forms import UserCreationForm
from django.contrib import auth
from django.contrib.auth.models import User
from django.contrib.auth import get_user_model
from oauth_client.models import Survey, Diary, Url
from random import Random
import time
import requests
import json
import re
import hashlib
oauth_server="http://auth.bctf.xctf.org.cn"
app_server="http://diary.bctf.xctf.org.cn"
client_id="nSGRX6eRNbkHBy457ZfiNG1JrUjJ9k7hZigKYA1q"
client_secret="S5Elk8WWpylMbNedDlpN12ds0zCgMNzcJCbHQqnc32Td4YKMpkLEX8F8i02I8DuYbcwZQvn0wbiiSlGSNHGhlLoOxos4xqHE5TCHvFlklUDkPF4VtNBVVdSKY8wC9btA"
UserModel = get_user_model()
# Create your views here.
@require_http_methods(["GET"])
def index(request):
return render(request, 'index.html')
@require_http_methods(["GET"])
def about(request):
return render(request, 'about.html')
@require_http_methods(["GET"])
def logout(request):
auth.logout(request)
return HttpResponseRedirect(oauth_server+"/accounts/logout/")
def _authenticate(request, username):
try:
user = UserModel._default_manager.get_by_natural_key(username)
except :
pass
else:
if _user_can_authenticate(user):
return user
return None
def _user_can_authenticate(user):
"""
Reject users with is_active=False. Custom user models that don't have
that attribute are allowed.
"""
is_active = getattr(user, 'is_active', None)
return is_active or is_active is None
@require_http_methods(["GET"])
@csrf_exempt
def receive_authcode(request):
if 'start_oauth' in request.session and request.session['start_oauth'] == 1:
request.session['start_oauth'] = 0
else:
raise Http404()
try:
if 'code' in request.GET:
code = request.GET.get('code', '').strip()
if code=='':
raise Http404()
url = oauth_server+'/o/token/'
s = requests.Session()
var = {'grant_type':'authorization_code',
'code':code,
'redirect_uri':app_server+'/o/receive_authcode',
'client_id':client_id,
'client_secret':client_secret,
}
r = s.post(url=url,data=var)
res=json.loads(r.text)
if 'access_token' in res:
access_token=res['access_token']
url = oauth_server+'/o/get-username/'
s = requests.Session()
var = {'token':access_token,}
headers = {'Authorization': 'Bearer '+access_token}
r = s.post(url=url,data=var,headers=headers)
res=json.loads(r.text)
username=res['username']
user = _authenticate(request, username)
if user!=None:
auth.login(request, user)
return redirect('/')
else:
new_user = User.objects.create_user(username=username, password="e6gqxLHvFR74LNBLvJpFDw20IrQH6nef")
new_user.save()
user = _authenticate(request, username)
if user!=None:
auth.login(request, user)
return redirect('/')
else:
raise Http404()
except:
pass
raise Http404()
@require_http_methods(["GET"])
def login(request):
if request.user.is_authenticated:
return redirect('/')
auth_url = oauth_server+"/o/authorize/?client_id="+client_id+"&state=preauth&response_type=code"
request.session['start_oauth'] = 1
return HttpResponseRedirect(auth_url)
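# Flow summary (editor note): login() above redirects the browser to the OAuth
# provider's /o/authorize/ endpoint with this app's client_id; after the user
# approves, the provider redirects back to /o/receive_authcode with ?code=...,
# and receive_authcode() exchanges that code for an access token at /o/token/
# and resolves it to a username via /o/get-username/.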
@require_http_methods(["GET"])
def diary(request):
if not request.user.is_authenticated:
raise Http404()
return render(request, 'diary.html')
@require_http_methods(["GET","POST"])
def survey(request):
if not request.user.is_authenticated:
raise Http404()
if request.method != 'POST':
return render(request, 'survey.html')
rate = request.POST.get('rate', '')
if rate=='1':
rate=1
elif rate=='2':
rate=2
elif rate=='3':
rate=3
elif rate=='4':
rate=4
elif rate=='5':
rate=5
else:
return render(request, 'survey.html', {'msg': 'Rate is invalid!'})
suggestion = request.POST.get('suggestion', '').strip()
if len(suggestion) > 2000 :
return render(request, 'survey.html', {'msg': 'Advice is too long!'})
if len(suggestion) <= 0:
return render(request, 'survey.html', {'msg': 'Advice is empty!'})
try:
Survey.objects.get(username=request.user.username,rate=rate,suggestion=suggestion)
except Survey.DoesNotExist:
Survey.objects.create(username=request.user.username,rate=rate,suggestion=suggestion)
if request.user.username=="firesun":
return render(request, 'survey.html', {'msg': 'Thank you. I will give you the flag. Flag is bctf{bFJbSakOT72T8HbDIrlst4kXGYbaHWgV}'})
else:
return render(request, 'survey.html', {'msg': 'Thank you. But the boss said only admin can get the flag after he finishes this survey, XD'})
@require_http_methods(["GET","POST"])
def edit_diary(request):
if not request.user.is_authenticated:
raise Http404()
if request.user.username=="firesun":
return HttpResponse("Don't do this!")
if request.method != 'POST':
return render(request, 'edit_diary.html')
content = request.POST.get('content', '').strip()
if len(content) > 1000 :
return render(request, 'edit_diary.html', {'msg': 'Too long!'})
if len(content) <= 0:
return render(request, 'edit_diary.html', {'msg': 'Write something!'})
try:
diary=Diary.objects.get(username=request.user.username)
Diary.objects.filter(id=diary.id).update(content=content)
except Diary.DoesNotExist:
Diary.objects.create(username=request.user.username,content=content)
return redirect('/diary/')
@require_http_methods(["GET"])
def report_status(request):
try:
url=Url.objects.get(id=request.GET.get('id', ''))
if url.is_read:
return HttpResponse("Admin has visited the address.")
else:
return HttpResponse("Admin doesn't visit the address yet.")
except:
raise Http404()
def random_str(randomlength=5):
str = ''
chars = '0123456789abcdef'
length = len(chars) - 1
random = Random()
for i in range(randomlength):
str+=chars[random.randint(0, length)]
return str
@require_http_methods(["GET","POST"])
def report_bugs(request):
if not request.user.is_authenticated:
raise Http404()
if request.method != 'POST':
captcha=random_str()
request.session['captcha']=captcha
return render(request, 'report.html',{'captcha': captcha})
else:
if ('captcha' in request.session) and (request.session['captcha'] == hashlib.md5(request.POST.get('captcha', '')).hexdigest()[0:5]):
captcha=request.session['captcha']
url = request.POST.get('url', '').strip()
if not url.startswith('http://diary.bctf.xctf.org.cn/'):
                return render(request, 'report.html', {'msg': 'We only care about problems from this website (http://diary.bctf.xctf.org.cn)!','captcha': captcha})
if len(url) > 200 or len(url) <= 0:
return render(request, 'report.html', {'msg': 'URL is too long!','captcha': captcha})
if not re.match(r'^https?:\/\/[\w\.\/:\-&@%=\?]+$', url):
return render(request, 'report.html', {'msg': 'Invalid URL!','captcha': captcha})
try:
new_url=Url.objects.create(url=url)
except:
return render(request, 'report.html', {'msg': 'Invalid URL!','captcha': captcha})
captcha=random_str()
request.session['captcha']=captcha
return render(request, 'report.html', {'msg': 'Report success! Click <a href="/report-status/?id='+str(new_url.id)+'">here</a> to check the status.','captcha': captcha})
else:
captcha=random_str()
request.session['captcha']=captcha
return render(request, 'report.html',{'msg': 'Invalid Captcha!','captcha': captcha})
@require_http_methods(["GET"])
def view_diary(request):
if not request.user.is_authenticated:
raise Http404()
content="Empty!"
try:
diary=Diary.objects.get(username=request.user.username)
content=diary.content
except:
pass
return JsonResponse({'content':content})
| firesunCN/My_CTF_Challenges | bctf_2017/diary/diary_server/firecms/oauth_client/views.py | Python | gpl-3.0 | 9,288 |
# Authors:
# Endi S. Dewata <[email protected]>
#
# Copyright (C) 2015 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import base64
import getpass
import io
import json
import os
import sys
from cryptography.fernet import Fernet, InvalidToken
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import load_pem_public_key,\
load_pem_private_key
import nss.nss as nss
from ipalib.frontend import Command, Object, Local
from ipalib import api, errors
from ipalib import Bytes, Flag, Str, StrEnum
from ipalib import output
from ipalib.crud import PKQuery, Retrieve
from ipalib.plugable import Registry
from ipalib.plugins.baseldap import LDAPObject, LDAPCreate, LDAPDelete,\
LDAPSearch, LDAPUpdate, LDAPRetrieve, LDAPAddMember, LDAPRemoveMember,\
LDAPModMember, pkey_to_value
from ipalib.request import context
from ipalib.plugins.baseuser import split_principal
from ipalib.plugins.service import normalize_principal
from ipalib import _, ngettext
from ipaplatform.paths import paths
from ipapython.dn import DN
if api.env.in_server:
import pki.account
import pki.key
__doc__ = _("""
Vaults
""") + _("""
Manage vaults.
""") + _("""
Vault is a secure place to store a secret.
""") + _("""
Based on the ownership there are three vault categories:
* user/private vault
* service vault
* shared vault
""") + _("""
User vaults are vaults owned by a particular user. Private
vaults are vaults owned by the current user. Service vaults are
vaults owned by a service. Shared vaults are owned by the admin,
but they can be used by other users or services.
""") + _("""
Based on the security mechanism there are three types of
vaults:
* standard vault
* symmetric vault
* asymmetric vault
""") + _("""
Standard vault uses a secure mechanism to transport and
store the secret. The secret can only be retrieved by users
that have access to the vault.
""") + _("""
Symmetric vault is similar to the standard vault, but it
pre-encrypts the secret using a password before transport.
The secret can only be retrieved using the same password.
""") + _("""
Asymmetric vault is similar to the standard vault, but it
pre-encrypts the secret using a public key before transport.
The secret can only be retrieved using the private key.
""") + _("""
EXAMPLES:
""") + _("""
List vaults:
ipa vault-find
[--user <user>|--service <service>|--shared]
""") + _("""
Add a standard vault:
ipa vault-add <name>
[--user <user>|--service <service>|--shared]
--type standard
""") + _("""
Add a symmetric vault:
ipa vault-add <name>
[--user <user>|--service <service>|--shared]
--type symmetric --password-file password.txt
""") + _("""
Add an asymmetric vault:
ipa vault-add <name>
[--user <user>|--service <service>|--shared]
--type asymmetric --public-key-file public.pem
""") + _("""
Show a vault:
ipa vault-show <name>
[--user <user>|--service <service>|--shared]
""") + _("""
Modify vault description:
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--desc <description>
""") + _("""
Modify vault type:
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--type <type>
[old password/private key]
[new password/public key]
""") + _("""
Modify symmetric vault password:
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--change-password
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--old-password <old password>
--new-password <new password>
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--old-password-file <old password file>
--new-password-file <new password file>
""") + _("""
Modify asymmetric vault keys:
ipa vault-mod <name>
[--user <user>|--service <service>|--shared]
--private-key-file <old private key file>
--public-key-file <new public key file>
""") + _("""
Delete a vault:
ipa vault-del <name>
[--user <user>|--service <service>|--shared]
""") + _("""
Display vault configuration:
ipa vaultconfig-show
""") + _("""
Archive data into standard vault:
ipa vault-archive <name>
[--user <user>|--service <service>|--shared]
--in <input file>
""") + _("""
Archive data into symmetric vault:
ipa vault-archive <name>
[--user <user>|--service <service>|--shared]
--in <input file>
--password-file password.txt
""") + _("""
Archive data into asymmetric vault:
ipa vault-archive <name>
[--user <user>|--service <service>|--shared]
--in <input file>
""") + _("""
Retrieve data from standard vault:
ipa vault-retrieve <name>
[--user <user>|--service <service>|--shared]
--out <output file>
""") + _("""
Retrieve data from symmetric vault:
ipa vault-retrieve <name>
[--user <user>|--service <service>|--shared]
--out <output file>
--password-file password.txt
""") + _("""
Retrieve data from asymmetric vault:
ipa vault-retrieve <name>
[--user <user>|--service <service>|--shared]
--out <output file> --private-key-file private.pem
""") + _("""
Add vault owners:
ipa vault-add-owner <name>
[--user <user>|--service <service>|--shared]
[--users <users>] [--groups <groups>] [--services <services>]
""") + _("""
Delete vault owners:
ipa vault-remove-owner <name>
[--user <user>|--service <service>|--shared]
[--users <users>] [--groups <groups>] [--services <services>]
""") + _("""
Add vault members:
ipa vault-add-member <name>
[--user <user>|--service <service>|--shared]
[--users <users>] [--groups <groups>] [--services <services>]
""") + _("""
Delete vault members:
ipa vault-remove-member <name>
[--user <user>|--service <service>|--shared]
[--users <users>] [--groups <groups>] [--services <services>]
""")
def validated_read(argname, filename, mode='r', encoding=None):
"""Read file and catch errors
IOError and UnicodeError (for text files) are turned into a
ValidationError
"""
try:
with io.open(filename, mode=mode, encoding=encoding) as f:
data = f.read()
except IOError as exc:
raise errors.ValidationError(
name=argname,
error=_("Cannot read file '%(filename)s': %(exc)s") % {
'filename': filename, 'exc': exc.args[1]
}
)
except UnicodeError as exc:
raise errors.ValidationError(
name=argname,
error=_("Cannot decode file '%(filename)s': %(exc)s") % {
'filename': filename, 'exc': exc
}
)
return data
register = Registry()
MAX_VAULT_DATA_SIZE = 2**20 # = 1 MB
vault_options = (
Str(
'service?',
doc=_('Service name of the service vault'),
normalizer=normalize_principal,
),
Flag(
'shared?',
doc=_('Shared vault'),
),
Str(
'username?',
cli_name='user',
doc=_('Username of the user vault'),
),
)
class VaultModMember(LDAPModMember):
def get_options(self):
for param in super(VaultModMember, self).get_options():
if param.name == 'service' and param not in vault_options:
param = param.clone_rename('services')
yield param
def get_member_dns(self, **options):
if 'services' in options:
options['service'] = options.pop('services')
else:
options.pop('service', None)
return super(VaultModMember, self).get_member_dns(**options)
def post_callback(self, ldap, completed, failed, dn, entry_attrs, *keys, **options):
for fail in failed.itervalues():
fail['services'] = fail.pop('service', [])
self.obj.get_container_attribute(entry_attrs, options)
return completed, dn
@register()
class vaultcontainer(LDAPObject):
__doc__ = _("""
Vault Container object.
""")
container_dn = api.env.container_vault
object_name = _('vaultcontainer')
object_name_plural = _('vaultcontainers')
object_class = ['ipaVaultContainer']
permission_filter_objectclasses = ['ipaVaultContainer']
attribute_members = {
'owner': ['user', 'group', 'service'],
}
label = _('Vault Containers')
label_singular = _('Vault Container')
managed_permissions = {
'System: Read Vault Containers': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'read', 'search', 'compare'},
'ipapermdefaultattr': {
'objectclass', 'cn', 'description', 'owner',
},
'default_privileges': {'Vault Administrators'},
},
'System: Add Vault Containers': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'add'},
'default_privileges': {'Vault Administrators'},
},
'System: Delete Vault Containers': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'delete'},
'default_privileges': {'Vault Administrators'},
},
'System: Modify Vault Containers': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'write'},
'ipapermdefaultattr': {
'objectclass', 'cn', 'description',
},
'default_privileges': {'Vault Administrators'},
},
'System: Manage Vault Container Ownership': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'write'},
'ipapermdefaultattr': {
'owner',
},
'default_privileges': {'Vault Administrators'},
},
}
takes_params = (
Str(
'owner_user?',
label=_('Owner users'),
),
Str(
'owner_group?',
label=_('Owner groups'),
),
Str(
'owner_service?',
label=_('Owner services'),
),
Str(
'owner?',
label=_('Failed owners'),
),
Str(
'service?',
label=_('Vault service'),
flags={'virtual_attribute'},
),
Flag(
'shared?',
label=_('Shared vault'),
flags={'virtual_attribute'},
),
Str(
'username?',
label=_('Vault user'),
flags={'virtual_attribute'},
),
)
def get_dn(self, *keys, **options):
"""
Generates vault DN from parameters.
"""
service = options.get('service')
shared = options.get('shared')
user = options.get('username')
count = (bool(service) + bool(shared) + bool(user))
if count > 1:
raise errors.MutuallyExclusiveError(
reason=_('Service, shared and user options ' +
'cannot be specified simultaneously'))
parent_dn = super(vaultcontainer, self).get_dn(*keys, **options)
if not count:
principal = getattr(context, 'principal')
if principal.startswith('host/'):
raise errors.NotImplementedError(
reason=_('Host is not supported'))
(name, realm) = split_principal(principal)
if '/' in name:
service = principal
else:
user = name
if service:
dn = DN(('cn', service), ('cn', 'services'), parent_dn)
elif shared:
dn = DN(('cn', 'shared'), parent_dn)
elif user:
dn = DN(('cn', user), ('cn', 'users'), parent_dn)
else:
raise RuntimeError
return dn
def get_container_attribute(self, entry, options):
if options.get('raw', False):
return
container_dn = DN(self.container_dn, self.api.env.basedn)
if entry.dn.endswith(DN(('cn', 'services'), container_dn)):
entry['service'] = entry.dn[0]['cn']
elif entry.dn.endswith(DN(('cn', 'shared'), container_dn)):
entry['shared'] = True
elif entry.dn.endswith(DN(('cn', 'users'), container_dn)):
entry['username'] = entry.dn[0]['cn']
@register()
class vaultcontainer_show(LDAPRetrieve):
__doc__ = _('Display information about a vault container.')
takes_options = LDAPRetrieve.takes_options + vault_options
has_output_params = LDAPRetrieve.has_output_params
def pre_callback(self, ldap, dn, attrs_list, *keys, **options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
return dn
def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
self.obj.get_container_attribute(entry_attrs, options)
return dn
@register()
class vaultcontainer_del(LDAPDelete):
__doc__ = _('Delete a vault container.')
takes_options = LDAPDelete.takes_options + vault_options
msg_summary = _('Deleted vault container')
subtree_delete = False
def pre_callback(self, ldap, dn, *keys, **options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
return dn
def execute(self, *keys, **options):
keys = keys + (u'',)
return super(vaultcontainer_del, self).execute(*keys, **options)
@register()
class vaultcontainer_add_owner(VaultModMember, LDAPAddMember):
__doc__ = _('Add owners to a vault container.')
takes_options = LDAPAddMember.takes_options + vault_options
member_attributes = ['owner']
member_param_label = _('owner %s')
member_count_out = ('%i owner added.', '%i owners added.')
has_output = (
output.Entry('result'),
output.Output(
'failed',
type=dict,
doc=_('Owners that could not be added'),
),
output.Output(
'completed',
type=int,
doc=_('Number of owners added'),
),
)
@register()
class vaultcontainer_remove_owner(VaultModMember, LDAPRemoveMember):
__doc__ = _('Remove owners from a vault container.')
takes_options = LDAPRemoveMember.takes_options + vault_options
member_attributes = ['owner']
member_param_label = _('owner %s')
member_count_out = ('%i owner removed.', '%i owners removed.')
has_output = (
output.Entry('result'),
output.Output(
'failed',
type=dict,
doc=_('Owners that could not be removed'),
),
output.Output(
'completed',
type=int,
doc=_('Number of owners removed'),
),
)
@register()
class vault(LDAPObject):
__doc__ = _("""
Vault object.
""")
container_dn = api.env.container_vault
object_name = _('vault')
object_name_plural = _('vaults')
object_class = ['ipaVault']
permission_filter_objectclasses = ['ipaVault']
default_attributes = [
'cn',
'description',
'ipavaulttype',
'ipavaultsalt',
'ipavaultpublickey',
'owner',
'member',
]
search_display_attributes = [
'cn',
'description',
'ipavaulttype',
]
attribute_members = {
'owner': ['user', 'group', 'service'],
'member': ['user', 'group', 'service'],
}
label = _('Vaults')
label_singular = _('Vault')
managed_permissions = {
'System: Read Vaults': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'read', 'search', 'compare'},
'ipapermdefaultattr': {
'objectclass', 'cn', 'description', 'ipavaulttype',
'ipavaultsalt', 'ipavaultpublickey', 'owner', 'member',
'memberuser', 'memberhost',
},
'default_privileges': {'Vault Administrators'},
},
'System: Add Vaults': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'add'},
'default_privileges': {'Vault Administrators'},
},
'System: Delete Vaults': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'delete'},
'default_privileges': {'Vault Administrators'},
},
'System: Modify Vaults': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'write'},
'ipapermdefaultattr': {
'objectclass', 'cn', 'description', 'ipavaulttype',
'ipavaultsalt', 'ipavaultpublickey',
},
'default_privileges': {'Vault Administrators'},
},
'System: Manage Vault Ownership': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'write'},
'ipapermdefaultattr': {
'owner',
},
'default_privileges': {'Vault Administrators'},
},
'System: Manage Vault Membership': {
'ipapermlocation': api.env.basedn,
'ipapermtarget': DN(api.env.container_vault, api.env.basedn),
'ipapermright': {'write'},
'ipapermdefaultattr': {
'member',
},
'default_privileges': {'Vault Administrators'},
},
}
takes_params = (
Str(
'cn',
cli_name='name',
label=_('Vault name'),
primary_key=True,
pattern='^[a-zA-Z0-9_.-]+$',
pattern_errmsg='may only include letters, numbers, _, ., and -',
maxlength=255,
),
Str(
'description?',
cli_name='desc',
label=_('Description'),
doc=_('Vault description'),
),
StrEnum(
'ipavaulttype?',
cli_name='type',
label=_('Type'),
doc=_('Vault type'),
values=(u'standard', u'symmetric', u'asymmetric', ),
default=u'symmetric',
autofill=True,
),
Bytes(
'ipavaultsalt?',
cli_name='salt',
label=_('Salt'),
doc=_('Vault salt'),
flags=['no_search'],
),
Bytes(
'ipavaultpublickey?',
cli_name='public_key',
label=_('Public key'),
doc=_('Vault public key'),
flags=['no_search'],
),
Str(
'owner_user?',
label=_('Owner users'),
flags=['no_create', 'no_update', 'no_search'],
),
Str(
'owner_group?',
label=_('Owner groups'),
flags=['no_create', 'no_update', 'no_search'],
),
Str(
'owner_service?',
label=_('Owner services'),
flags=['no_create', 'no_update', 'no_search'],
),
Str(
'owner?',
label=_('Failed owners'),
flags=['no_create', 'no_update', 'no_search'],
),
Str(
'service?',
label=_('Vault service'),
flags={'virtual_attribute', 'no_create', 'no_update', 'no_search'},
),
Flag(
'shared?',
label=_('Shared vault'),
flags={'virtual_attribute', 'no_create', 'no_update', 'no_search'},
),
Str(
'username?',
label=_('Vault user'),
flags={'virtual_attribute', 'no_create', 'no_update', 'no_search'},
),
)
def get_dn(self, *keys, **options):
"""
Generates vault DN from parameters.
"""
service = options.get('service')
shared = options.get('shared')
user = options.get('username')
count = (bool(service) + bool(shared) + bool(user))
if count > 1:
raise errors.MutuallyExclusiveError(
reason=_('Service, shared, and user options ' +
'cannot be specified simultaneously'))
# TODO: create container_dn after object initialization then reuse it
container_dn = DN(self.container_dn, self.api.env.basedn)
dn = super(vault, self).get_dn(*keys, **options)
assert dn.endswith(container_dn)
rdns = DN(*dn[:-len(container_dn)])
if not count:
principal = getattr(context, 'principal')
if principal.startswith('host/'):
raise errors.NotImplementedError(
reason=_('Host is not supported'))
(name, realm) = split_principal(principal)
if '/' in name:
service = principal
else:
user = name
if service:
parent_dn = DN(('cn', service), ('cn', 'services'), container_dn)
elif shared:
parent_dn = DN(('cn', 'shared'), container_dn)
elif user:
parent_dn = DN(('cn', user), ('cn', 'users'), container_dn)
else:
raise RuntimeError
return DN(rdns, parent_dn)
def create_container(self, dn, owner_dn):
"""
Creates vault container and its parents.
"""
# TODO: create container_dn after object initialization then reuse it
container_dn = DN(self.container_dn, self.api.env.basedn)
entries = []
while dn:
assert dn.endswith(container_dn)
rdn = dn[0]
entry = self.backend.make_entry(
dn,
{
'objectclass': ['ipaVaultContainer'],
'cn': rdn['cn'],
'owner': [owner_dn],
})
            # if the entry can be added, we are done
try:
self.backend.add_entry(entry)
break
except errors.NotFound:
pass
# otherwise, create parent entry first
dn = DN(*dn[1:])
entries.insert(0, entry)
# then create the entries again
for entry in entries:
self.backend.add_entry(entry)
def get_key_id(self, dn):
"""
Generates a client key ID to archive/retrieve data in KRA.
"""
# TODO: create container_dn after object initialization then reuse it
container_dn = DN(self.container_dn, self.api.env.basedn)
# make sure the DN is a vault DN
if not dn.endswith(container_dn, 1):
raise ValueError('Invalid vault DN: %s' % dn)
# construct the vault ID from the bottom up
id = u''
for rdn in dn[:-len(container_dn)]:
name = rdn['cn']
id = u'/' + name + id
return 'ipa:' + id
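    # Example (editor note): a vault named "MyVault" owned by user "alice" has
    # DN cn=MyVault,cn=alice,cn=users,<container_dn> and therefore maps to the
    # client key ID u'ipa:/users/alice/MyVault'.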
def get_new_password(self):
"""
Gets new password from user and verify it.
"""
while True:
password = getpass.getpass('New password: ').decode(
sys.stdin.encoding)
password2 = getpass.getpass('Verify password: ').decode(
sys.stdin.encoding)
if password == password2:
return password
print(' ** Passwords do not match! **')
def get_existing_password(self):
"""
Gets existing password from user.
"""
return getpass.getpass('Password: ').decode(sys.stdin.encoding)
def generate_symmetric_key(self, password, salt):
"""
Generates symmetric key from password and salt.
"""
kdf = PBKDF2HMAC(
algorithm=hashes.SHA256(),
length=32,
salt=salt,
iterations=100000,
backend=default_backend()
)
return base64.b64encode(kdf.derive(password.encode('utf-8')))
def encrypt(self, data, symmetric_key=None, public_key=None):
"""
Encrypts data with symmetric key or public key.
"""
if symmetric_key:
fernet = Fernet(symmetric_key)
return fernet.encrypt(data)
elif public_key:
public_key_obj = load_pem_public_key(
data=public_key,
backend=default_backend()
)
return public_key_obj.encrypt(
data,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
def decrypt(self, data, symmetric_key=None, private_key=None):
"""
Decrypts data with symmetric key or public key.
"""
if symmetric_key:
try:
fernet = Fernet(symmetric_key)
return fernet.decrypt(data)
except InvalidToken:
raise errors.AuthenticationError(
message=_('Invalid credentials'))
elif private_key:
try:
private_key_obj = load_pem_private_key(
data=private_key,
password=None,
backend=default_backend()
)
return private_key_obj.decrypt(
data,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
)
)
except AssertionError:
raise errors.AuthenticationError(
message=_('Invalid credentials'))
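    # Illustrative sketch (editor addition, not part of the plugin) showing how
    # the symmetric-vault helpers above fit together; `vault_obj` stands for an
    # instance of this class (e.g. api.Object.vault):
    #
    #   salt = os.urandom(16)
    #   key = vault_obj.generate_symmetric_key(u'Secret123', salt)
    #   token = vault_obj.encrypt(b'vault data', symmetric_key=key)
    #   assert vault_obj.decrypt(token, symmetric_key=key) == b'vault data'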
def get_container_attribute(self, entry, options):
if options.get('raw', False):
return
container_dn = DN(self.container_dn, self.api.env.basedn)
if entry.dn.endswith(DN(('cn', 'services'), container_dn)):
entry['service'] = entry.dn[1]['cn']
elif entry.dn.endswith(DN(('cn', 'shared'), container_dn)):
entry['shared'] = True
elif entry.dn.endswith(DN(('cn', 'users'), container_dn)):
entry['username'] = entry.dn[1]['cn']
@register()
class vault_add(PKQuery, Local):
__doc__ = _('Create a new vault.')
takes_options = LDAPCreate.takes_options + vault_options + (
Str(
'description?',
cli_name='desc',
doc=_('Vault description'),
),
StrEnum(
'ipavaulttype?',
cli_name='type',
label=_('Type'),
doc=_('Vault type'),
values=(u'standard', u'symmetric', u'asymmetric', ),
default=u'symmetric',
autofill=True,
),
Str(
'password?',
cli_name='password',
doc=_('Vault password'),
),
Str( # TODO: use File parameter
'password_file?',
cli_name='password_file',
doc=_('File containing the vault password'),
),
Bytes(
'ipavaultpublickey?',
cli_name='public_key',
doc=_('Vault public key'),
),
Str( # TODO: use File parameter
'public_key_file?',
cli_name='public_key_file',
doc=_('File containing the vault public key'),
),
)
has_output = output.standard_entry
def forward(self, *args, **options):
vault_type = options.get('ipavaulttype')
password = options.get('password')
password_file = options.get('password_file')
public_key = options.get('ipavaultpublickey')
public_key_file = options.get('public_key_file')
# don't send these parameters to server
if 'password' in options:
del options['password']
if 'password_file' in options:
del options['password_file']
if 'public_key_file' in options:
del options['public_key_file']
if vault_type != u'symmetric' and (password or password_file):
raise errors.MutuallyExclusiveError(
reason=_('Password can be specified only for '
'symmetric vault')
)
if vault_type != u'asymmetric' and (public_key or public_key_file):
raise errors.MutuallyExclusiveError(
reason=_('Public key can be specified only for '
'asymmetric vault')
)
if self.api.env.in_server:
backend = self.api.Backend.ldap2
else:
backend = self.api.Backend.rpcclient
if not backend.isconnected():
backend.connect()
if vault_type == u'standard':
pass
elif vault_type == u'symmetric':
# get password
if password and password_file:
raise errors.MutuallyExclusiveError(
reason=_('Password specified multiple times'))
elif password:
pass
elif password_file:
password = validated_read('password-file',
password_file,
encoding='utf-8')
password = password.rstrip('\n')
else:
password = self.obj.get_new_password()
# generate vault salt
options['ipavaultsalt'] = os.urandom(16)
elif vault_type == u'asymmetric':
# get new vault public key
if public_key and public_key_file:
raise errors.MutuallyExclusiveError(
reason=_('Public key specified multiple times'))
elif public_key:
pass
elif public_key_file:
public_key = validated_read('public-key-file',
public_key_file,
mode='rb')
# store vault public key
options['ipavaultpublickey'] = public_key
else:
raise errors.ValidationError(
name='ipavaultpublickey',
error=_('Missing vault public key'))
# validate public key and prevent users from accidentally
# sending a private key to the server.
try:
load_pem_public_key(
data=public_key,
backend=default_backend()
)
except ValueError as e:
raise errors.ValidationError(
name='ipavaultpublickey',
error=_('Invalid or unsupported vault public key: %s') % e,
)
# create vault
response = self.api.Command.vault_add_internal(*args, **options)
# prepare parameters for archival
opts = options.copy()
if 'description' in opts:
del opts['description']
if 'ipavaulttype' in opts:
del opts['ipavaulttype']
if vault_type == u'symmetric':
opts['password'] = password
del opts['ipavaultsalt']
elif vault_type == u'asymmetric':
del opts['ipavaultpublickey']
# archive blank data
self.api.Command.vault_archive(*args, **opts)
return response
@register()
class vault_add_internal(LDAPCreate):
NO_CLI = True
takes_options = vault_options
msg_summary = _('Added vault "%(value)s"')
def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys,
**options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
principal = getattr(context, 'principal')
(name, realm) = split_principal(principal)
if '/' in name:
owner_dn = self.api.Object.service.get_dn(name)
else:
owner_dn = self.api.Object.user.get_dn(name)
parent_dn = DN(*dn[1:])
try:
self.obj.create_container(parent_dn, owner_dn)
except errors.DuplicateEntry as e:
pass
# vault should be owned by the creator
entry_attrs['owner'] = owner_dn
return dn
def post_callback(self, ldap, dn, entry, *keys, **options):
self.obj.get_container_attribute(entry, options)
return dn
@register()
class vault_del(LDAPDelete):
__doc__ = _('Delete a vault.')
takes_options = LDAPDelete.takes_options + vault_options
msg_summary = _('Deleted vault "%(value)s"')
def pre_callback(self, ldap, dn, *keys, **options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
return dn
def post_callback(self, ldap, dn, *args, **options):
assert isinstance(dn, DN)
kra_client = self.api.Backend.kra.get_client()
kra_account = pki.account.AccountClient(kra_client.connection)
kra_account.login()
client_key_id = self.obj.get_key_id(dn)
# deactivate vault record in KRA
response = kra_client.keys.list_keys(
client_key_id, pki.key.KeyClient.KEY_STATUS_ACTIVE)
for key_info in response.key_infos:
kra_client.keys.modify_key_status(
key_info.get_key_id(),
pki.key.KeyClient.KEY_STATUS_INACTIVE)
kra_account.logout()
return True
@register()
class vault_find(LDAPSearch):
__doc__ = _('Search for vaults.')
takes_options = LDAPSearch.takes_options + vault_options + (
Flag(
'services?',
doc=_('List all service vaults'),
),
Flag(
'users?',
doc=_('List all user vaults'),
),
)
has_output_params = LDAPSearch.has_output_params
msg_summary = ngettext(
'%(count)d vault matched',
'%(count)d vaults matched',
0,
)
def pre_callback(self, ldap, filter, attrs_list, base_dn, scope, *args,
**options):
assert isinstance(base_dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
if options.get('users') or options.get('services'):
mutex = ['service', 'services', 'shared', 'username', 'users']
count = sum(bool(options.get(option)) for option in mutex)
if count > 1:
raise errors.MutuallyExclusiveError(
reason=_('Service(s), shared, and user(s) options ' +
'cannot be specified simultaneously'))
scope = ldap.SCOPE_SUBTREE
container_dn = DN(self.obj.container_dn,
self.api.env.basedn)
if options.get('services'):
base_dn = DN(('cn', 'services'), container_dn)
else:
base_dn = DN(('cn', 'users'), container_dn)
else:
base_dn = self.obj.get_dn(None, **options)
return filter, base_dn, scope
def post_callback(self, ldap, entries, truncated, *args, **options):
for entry in entries:
self.obj.get_container_attribute(entry, options)
return truncated
def exc_callback(self, args, options, exc, call_func, *call_args,
**call_kwargs):
if call_func.__name__ == 'find_entries':
if isinstance(exc, errors.NotFound):
# ignore missing containers since they will be created
# automatically on vault creation.
raise errors.EmptyResult(reason=str(exc))
raise exc
@register()
class vault_mod(PKQuery, Local):
__doc__ = _('Modify a vault.')
takes_options = vault_options + (
Str(
'description?',
cli_name='desc',
doc=_('Vault description'),
),
Str(
'ipavaulttype?',
cli_name='type',
doc=_('Vault type'),
),
Bytes(
'ipavaultsalt?',
cli_name='salt',
doc=_('Vault salt'),
),
Flag(
'change_password?',
doc=_('Change password'),
),
Str(
'old_password?',
cli_name='old_password',
doc=_('Old vault password'),
),
Str( # TODO: use File parameter
'old_password_file?',
cli_name='old_password_file',
doc=_('File containing the old vault password'),
),
Str(
'new_password?',
cli_name='new_password',
doc=_('New vault password'),
),
Str( # TODO: use File parameter
'new_password_file?',
cli_name='new_password_file',
doc=_('File containing the new vault password'),
),
Bytes(
'private_key?',
cli_name='private_key',
doc=_('Old vault private key'),
),
Str( # TODO: use File parameter
'private_key_file?',
cli_name='private_key_file',
doc=_('File containing the old vault private key'),
),
Bytes(
'ipavaultpublickey?',
cli_name='public_key',
doc=_('New vault public key'),
),
Str( # TODO: use File parameter
'public_key_file?',
cli_name='public_key_file',
doc=_('File containing the new vault public key'),
),
)
has_output = output.standard_entry
def forward(self, *args, **options):
vault_type = options.pop('ipavaulttype', False)
salt = options.pop('ipavaultsalt', False)
change_password = options.pop('change_password', False)
old_password = options.pop('old_password', None)
old_password_file = options.pop('old_password_file', None)
new_password = options.pop('new_password', None)
new_password_file = options.pop('new_password_file', None)
old_private_key = options.pop('private_key', None)
old_private_key_file = options.pop('private_key_file', None)
new_public_key = options.pop('ipavaultpublickey', None)
new_public_key_file = options.pop('public_key_file', None)
if self.api.env.in_server:
backend = self.api.Backend.ldap2
else:
backend = self.api.Backend.rpcclient
if not backend.isconnected():
backend.connect()
# determine the vault type based on parameters specified
if vault_type:
pass
elif change_password or new_password or new_password_file or salt:
vault_type = u'symmetric'
elif new_public_key or new_public_key_file:
vault_type = u'asymmetric'
# if vault type is specified, retrieve existing secret
if vault_type:
opts = options.copy()
opts.pop('description', None)
opts['password'] = old_password
opts['password_file'] = old_password_file
opts['private_key'] = old_private_key
opts['private_key_file'] = old_private_key_file
response = self.api.Command.vault_retrieve(*args, **opts)
data = response['result']['data']
opts = options.copy()
# if vault type is specified, update crypto attributes
if vault_type:
opts['ipavaulttype'] = vault_type
if vault_type == u'standard':
opts['ipavaultsalt'] = None
opts['ipavaultpublickey'] = None
elif vault_type == u'symmetric':
if salt:
opts['ipavaultsalt'] = salt
else:
opts['ipavaultsalt'] = os.urandom(16)
opts['ipavaultpublickey'] = None
elif vault_type == u'asymmetric':
# get new vault public key
if new_public_key and new_public_key_file:
raise errors.MutuallyExclusiveError(
reason=_('New public key specified multiple times'))
elif new_public_key:
pass
elif new_public_key_file:
new_public_key = validated_read('public_key_file',
new_public_key_file,
mode='rb')
else:
raise errors.ValidationError(
name='ipavaultpublickey',
error=_('Missing new vault public key'))
opts['ipavaultsalt'] = None
opts['ipavaultpublickey'] = new_public_key
response = self.api.Command.vault_mod_internal(*args, **opts)
# if vault type is specified, rearchive existing secret
if vault_type:
opts = options.copy()
opts.pop('description', None)
opts['data'] = data
opts['password'] = new_password
opts['password_file'] = new_password_file
opts['override_password'] = True
self.api.Command.vault_archive(*args, **opts)
return response
@register()
class vault_mod_internal(LDAPUpdate):
NO_CLI = True
takes_options = LDAPUpdate.takes_options + vault_options
msg_summary = _('Modified vault "%(value)s"')
def pre_callback(self, ldap, dn, entry_attrs, attrs_list,
*keys, **options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
return dn
def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
self.obj.get_container_attribute(entry_attrs, options)
return dn
@register()
class vault_show(LDAPRetrieve):
__doc__ = _('Display information about a vault.')
takes_options = LDAPRetrieve.takes_options + vault_options
has_output_params = LDAPRetrieve.has_output_params
def pre_callback(self, ldap, dn, attrs_list, *keys, **options):
assert isinstance(dn, DN)
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
return dn
def post_callback(self, ldap, dn, entry_attrs, *keys, **options):
self.obj.get_container_attribute(entry_attrs, options)
return dn
@register()
class vaultconfig(Object):
__doc__ = _('Vault configuration')
takes_params = (
Bytes(
'transport_cert',
label=_('Transport Certificate'),
),
)
@register()
class vaultconfig_show(Retrieve):
__doc__ = _('Show vault configuration.')
takes_options = (
Str(
'transport_out?',
doc=_('Output file to store the transport certificate'),
),
)
def forward(self, *args, **options):
file = options.get('transport_out')
# don't send these parameters to server
if 'transport_out' in options:
del options['transport_out']
response = super(vaultconfig_show, self).forward(*args, **options)
if file:
with open(file, 'w') as f:
f.write(response['result']['transport_cert'])
return response
def execute(self, *args, **options):
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
kra_client = self.api.Backend.kra.get_client()
transport_cert = kra_client.system_certs.get_transport_cert()
return {
'result': {
'transport_cert': transport_cert.binary
},
'value': None,
}
@register()
class vault_archive(PKQuery, Local):
__doc__ = _('Archive data into a vault.')
takes_options = vault_options + (
Bytes(
'data?',
doc=_('Binary data to archive'),
),
Str( # TODO: use File parameter
'in?',
doc=_('File containing data to archive'),
),
Str(
'password?',
cli_name='password',
doc=_('Vault password'),
),
Str( # TODO: use File parameter
'password_file?',
cli_name='password_file',
doc=_('File containing the vault password'),
),
Flag(
'override_password?',
doc=_('Override existing password'),
),
)
has_output = output.standard_entry
def forward(self, *args, **options):
name = args[-1]
data = options.get('data')
input_file = options.get('in')
password = options.get('password')
password_file = options.get('password_file')
override_password = options.pop('override_password', False)
# don't send these parameters to server
if 'data' in options:
del options['data']
if 'in' in options:
del options['in']
if 'password' in options:
del options['password']
if 'password_file' in options:
del options['password_file']
# get data
if data and input_file:
raise errors.MutuallyExclusiveError(
reason=_('Input data specified multiple times'))
elif data:
if len(data) > MAX_VAULT_DATA_SIZE:
raise errors.ValidationError(name="data", error=_(
"Size of data exceeds the limit. Current vault data size "
"limit is %(limit)d B")
% {'limit': MAX_VAULT_DATA_SIZE})
elif input_file:
try:
stat = os.stat(input_file)
except OSError as exc:
raise errors.ValidationError(name="in", error=_(
"Cannot read file '%(filename)s': %(exc)s")
% {'filename': input_file, 'exc': exc.args[1]})
if stat.st_size > MAX_VAULT_DATA_SIZE:
raise errors.ValidationError(name="in", error=_(
"Size of data exceeds the limit. Current vault data size "
"limit is %(limit)d B")
% {'limit': MAX_VAULT_DATA_SIZE})
data = validated_read('in', input_file, mode='rb')
else:
data = ''
if self.api.env.in_server:
backend = self.api.Backend.ldap2
else:
backend = self.api.Backend.rpcclient
if not backend.isconnected():
backend.connect()
# retrieve vault info
vault = self.api.Command.vault_show(*args, **options)['result']
vault_type = vault['ipavaulttype'][0]
if vault_type == u'standard':
encrypted_key = None
elif vault_type == u'symmetric':
# get password
if password and password_file:
raise errors.MutuallyExclusiveError(
reason=_('Password specified multiple times'))
elif password:
pass
elif password_file:
password = validated_read('password-file',
password_file,
encoding='utf-8')
password = password.rstrip('\n')
else:
if override_password:
password = self.obj.get_new_password()
else:
password = self.obj.get_existing_password()
if not override_password:
# verify password by retrieving existing data
opts = options.copy()
opts['password'] = password
try:
self.api.Command.vault_retrieve(*args, **opts)
except errors.NotFound:
pass
salt = vault['ipavaultsalt'][0]
# generate encryption key from vault password
encryption_key = self.obj.generate_symmetric_key(
password, salt)
# encrypt data with encryption key
data = self.obj.encrypt(data, symmetric_key=encryption_key)
encrypted_key = None
elif vault_type == u'asymmetric':
public_key = vault['ipavaultpublickey'][0].encode('utf-8')
# generate encryption key
encryption_key = base64.b64encode(os.urandom(32))
# encrypt data with encryption key
data = self.obj.encrypt(data, symmetric_key=encryption_key)
# encrypt encryption key with public key
encrypted_key = self.obj.encrypt(
encryption_key, public_key=public_key)
else:
raise errors.ValidationError(
name='vault_type',
error=_('Invalid vault type'))
# initialize NSS database
current_dbdir = paths.IPA_NSSDB_DIR
nss.nss_init(current_dbdir)
# retrieve transport certificate
config = self.api.Command.vaultconfig_show()['result']
transport_cert_der = config['transport_cert']
nss_transport_cert = nss.Certificate(transport_cert_der)
# generate session key
mechanism = nss.CKM_DES3_CBC_PAD
slot = nss.get_best_slot(mechanism)
key_length = slot.get_best_key_length(mechanism)
session_key = slot.key_gen(mechanism, None, key_length)
# wrap session key with transport certificate
# pylint: disable=no-member
public_key = nss_transport_cert.subject_public_key_info.public_key
# pylint: enable=no-member
wrapped_session_key = nss.pub_wrap_sym_key(mechanism,
public_key,
session_key)
options['session_key'] = wrapped_session_key.data
nonce_length = nss.get_iv_length(mechanism)
nonce = nss.generate_random(nonce_length)
options['nonce'] = nonce
vault_data = {}
vault_data[u'data'] = base64.b64encode(data).decode('utf-8')
if encrypted_key:
vault_data[u'encrypted_key'] = base64.b64encode(encrypted_key)\
.decode('utf-8')
json_vault_data = json.dumps(vault_data)
# wrap vault_data with session key
iv_si = nss.SecItem(nonce)
iv_param = nss.param_from_iv(mechanism, iv_si)
encoding_ctx = nss.create_context_by_sym_key(mechanism,
nss.CKA_ENCRYPT,
session_key,
iv_param)
wrapped_vault_data = encoding_ctx.cipher_op(json_vault_data)\
+ encoding_ctx.digest_final()
options['vault_data'] = wrapped_vault_data
return self.api.Command.vault_archive_internal(*args, **options)
@register()
class vault_archive_internal(PKQuery):
NO_CLI = True
takes_options = vault_options + (
Bytes(
'session_key',
doc=_('Session key wrapped with transport certificate'),
),
Bytes(
'vault_data',
doc=_('Vault data encrypted with session key'),
),
Bytes(
'nonce',
doc=_('Nonce'),
),
)
has_output = output.standard_entry
msg_summary = _('Archived data into vault "%(value)s"')
def execute(self, *args, **options):
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
wrapped_vault_data = options.pop('vault_data')
nonce = options.pop('nonce')
wrapped_session_key = options.pop('session_key')
# retrieve vault info
vault = self.api.Command.vault_show(*args, **options)['result']
# connect to KRA
kra_client = self.api.Backend.kra.get_client()
kra_account = pki.account.AccountClient(kra_client.connection)
kra_account.login()
client_key_id = self.obj.get_key_id(vault['dn'])
# deactivate existing vault record in KRA
response = kra_client.keys.list_keys(
client_key_id,
pki.key.KeyClient.KEY_STATUS_ACTIVE)
for key_info in response.key_infos:
kra_client.keys.modify_key_status(
key_info.get_key_id(),
pki.key.KeyClient.KEY_STATUS_INACTIVE)
# forward wrapped data to KRA
kra_client.keys.archive_encrypted_data(
client_key_id,
pki.key.KeyClient.PASS_PHRASE_TYPE,
wrapped_vault_data,
wrapped_session_key,
None,
nonce,
)
kra_account.logout()
response = {
'value': args[-1],
'result': {},
}
response['summary'] = self.msg_summary % response
return response
@register()
class vault_retrieve(PKQuery, Local):
    __doc__ = _('Retrieve data from a vault.')
takes_options = vault_options + (
Str(
'out?',
doc=_('File to store retrieved data'),
),
Str(
'password?',
cli_name='password',
doc=_('Vault password'),
),
Str( # TODO: use File parameter
'password_file?',
cli_name='password_file',
doc=_('File containing the vault password'),
),
Bytes(
'private_key?',
cli_name='private_key',
doc=_('Vault private key'),
),
Str( # TODO: use File parameter
'private_key_file?',
cli_name='private_key_file',
doc=_('File containing the vault private key'),
),
)
has_output = output.standard_entry
has_output_params = (
Bytes(
'data',
label=_('Data'),
),
)
def forward(self, *args, **options):
name = args[-1]
output_file = options.get('out')
password = options.get('password')
password_file = options.get('password_file')
private_key = options.get('private_key')
private_key_file = options.get('private_key_file')
# don't send these parameters to server
if 'out' in options:
del options['out']
if 'password' in options:
del options['password']
if 'password_file' in options:
del options['password_file']
if 'private_key' in options:
del options['private_key']
if 'private_key_file' in options:
del options['private_key_file']
if self.api.env.in_server:
backend = self.api.Backend.ldap2
else:
backend = self.api.Backend.rpcclient
if not backend.isconnected():
backend.connect()
# retrieve vault info
vault = self.api.Command.vault_show(*args, **options)['result']
vault_type = vault['ipavaulttype'][0]
# initialize NSS database
current_dbdir = paths.IPA_NSSDB_DIR
nss.nss_init(current_dbdir)
# retrieve transport certificate
config = self.api.Command.vaultconfig_show()['result']
transport_cert_der = config['transport_cert']
nss_transport_cert = nss.Certificate(transport_cert_der)
# generate session key
mechanism = nss.CKM_DES3_CBC_PAD
slot = nss.get_best_slot(mechanism)
key_length = slot.get_best_key_length(mechanism)
session_key = slot.key_gen(mechanism, None, key_length)
# wrap session key with transport certificate
# pylint: disable=no-member
public_key = nss_transport_cert.subject_public_key_info.public_key
# pylint: enable=no-member
wrapped_session_key = nss.pub_wrap_sym_key(mechanism,
public_key,
session_key)
# send retrieval request to server
options['session_key'] = wrapped_session_key.data
response = self.api.Command.vault_retrieve_internal(*args, **options)
result = response['result']
nonce = result['nonce']
# unwrap data with session key
wrapped_vault_data = result['vault_data']
iv_si = nss.SecItem(nonce)
iv_param = nss.param_from_iv(mechanism, iv_si)
decoding_ctx = nss.create_context_by_sym_key(mechanism,
nss.CKA_DECRYPT,
session_key,
iv_param)
json_vault_data = decoding_ctx.cipher_op(wrapped_vault_data)\
+ decoding_ctx.digest_final()
vault_data = json.loads(json_vault_data)
data = base64.b64decode(vault_data[u'data'].encode('utf-8'))
encrypted_key = None
if 'encrypted_key' in vault_data:
encrypted_key = base64.b64decode(vault_data[u'encrypted_key']
.encode('utf-8'))
if vault_type == u'standard':
pass
elif vault_type == u'symmetric':
salt = vault['ipavaultsalt'][0]
# get encryption key from vault password
if password and password_file:
raise errors.MutuallyExclusiveError(
reason=_('Password specified multiple times'))
elif password:
pass
elif password_file:
password = validated_read('password-file',
password_file,
encoding='utf-8')
password = password.rstrip('\n')
else:
password = self.obj.get_existing_password()
# generate encryption key from password
encryption_key = self.obj.generate_symmetric_key(password, salt)
# decrypt data with encryption key
data = self.obj.decrypt(data, symmetric_key=encryption_key)
elif vault_type == u'asymmetric':
# get encryption key with vault private key
if private_key and private_key_file:
raise errors.MutuallyExclusiveError(
reason=_('Private key specified multiple times'))
elif private_key:
pass
elif private_key_file:
private_key = validated_read('private-key-file',
private_key_file,
mode='rb')
else:
raise errors.ValidationError(
name='private_key',
error=_('Missing vault private key'))
# decrypt encryption key with private key
encryption_key = self.obj.decrypt(
encrypted_key, private_key=private_key)
# decrypt data with encryption key
data = self.obj.decrypt(data, symmetric_key=encryption_key)
else:
raise errors.ValidationError(
name='vault_type',
error=_('Invalid vault type'))
if output_file:
with open(output_file, 'w') as f:
f.write(data)
else:
response['result'] = {'data': data}
return response
@register()
class vault_retrieve_internal(PKQuery):
NO_CLI = True
takes_options = vault_options + (
Bytes(
'session_key',
doc=_('Session key wrapped with transport certificate'),
),
)
has_output = output.standard_entry
msg_summary = _('Retrieved data from vault "%(value)s"')
def execute(self, *args, **options):
if not self.api.Command.kra_is_enabled()['result']:
raise errors.InvocationError(
format=_('KRA service is not enabled'))
wrapped_session_key = options.pop('session_key')
# retrieve vault info
vault = self.api.Command.vault_show(*args, **options)['result']
# connect to KRA
kra_client = self.api.Backend.kra.get_client()
kra_account = pki.account.AccountClient(kra_client.connection)
kra_account.login()
client_key_id = self.obj.get_key_id(vault['dn'])
# find vault record in KRA
response = kra_client.keys.list_keys(
client_key_id,
pki.key.KeyClient.KEY_STATUS_ACTIVE)
if not len(response.key_infos):
raise errors.NotFound(reason=_('No archived data.'))
key_info = response.key_infos[0]
# retrieve encrypted data from KRA
key = kra_client.keys.retrieve_key(
key_info.get_key_id(),
wrapped_session_key)
kra_account.logout()
response = {
'value': args[-1],
'result': {
'vault_data': key.encrypted_data,
'nonce': key.nonce_data,
},
}
response['summary'] = self.msg_summary % response
return response
@register()
class vault_add_owner(VaultModMember, LDAPAddMember):
__doc__ = _('Add owners to a vault.')
takes_options = LDAPAddMember.takes_options + vault_options
member_attributes = ['owner']
member_param_label = _('owner %s')
member_count_out = ('%i owner added.', '%i owners added.')
has_output = (
output.Entry('result'),
output.Output(
'failed',
type=dict,
doc=_('Owners that could not be added'),
),
output.Output(
'completed',
type=int,
doc=_('Number of owners added'),
),
)
@register()
class vault_remove_owner(VaultModMember, LDAPRemoveMember):
__doc__ = _('Remove owners from a vault.')
takes_options = LDAPRemoveMember.takes_options + vault_options
member_attributes = ['owner']
member_param_label = _('owner %s')
member_count_out = ('%i owner removed.', '%i owners removed.')
has_output = (
output.Entry('result'),
output.Output(
'failed',
type=dict,
doc=_('Owners that could not be removed'),
),
output.Output(
'completed',
type=int,
doc=_('Number of owners removed'),
),
)
@register()
class vault_add_member(VaultModMember, LDAPAddMember):
__doc__ = _('Add members to a vault.')
takes_options = LDAPAddMember.takes_options + vault_options
@register()
class vault_remove_member(VaultModMember, LDAPRemoveMember):
__doc__ = _('Remove members from a vault.')
takes_options = LDAPRemoveMember.takes_options + vault_options
@register()
class kra_is_enabled(Command):
NO_CLI = True
has_output = output.standard_value
def execute(self, *args, **options):
base_dn = DN(('cn', 'masters'), ('cn', 'ipa'), ('cn', 'etc'),
self.api.env.basedn)
filter = '(&(objectClass=ipaConfigObject)(cn=KRA))'
try:
self.api.Backend.ldap2.find_entries(
base_dn=base_dn, filter=filter, attrs_list=[])
except errors.NotFound:
result = False
else:
result = True
return dict(result=result, value=pkey_to_value(None, options))
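# A hedged usage sketch (not part of the original plugin): typical client-side
# calls exercising the vault commands above, assuming a bootstrapped ipalib
# api object and a reachable server with the KRA service enabled.
#
#   from ipalib import api
#   api.bootstrap(context='cli')
#   api.finalize()
#   api.Backend.rpcclient.connect()
#   api.Command.vault_add(u'myvault', ipavaulttype=u'symmetric',
#                         password=u'Secret123')
#   api.Command.vault_archive(u'myvault', data=b'binary blob',
#                             password=u'Secret123')
#   res = api.Command.vault_retrieve(u'myvault', password=u'Secret123')
#   print(res['result']['data'])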
| tbabej/freeipa | ipalib/plugins/vault.py | Python | gpl-3.0 | 65,598 |
#!/usr/bin/python
# service_proxy_server.py
#
# Copyright (C) 2008-2018 Veselin Penev, https://bitdust.io
#
# This file (service_proxy_server.py) is part of BitDust Software.
#
# BitDust is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BitDust Software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BitDust Software. If not, see <http://www.gnu.org/licenses/>.
#
# Please contact us if you have any questions at [email protected]
#
#
#
#
"""
.. module:: service_proxy_server
"""
from __future__ import absolute_import
from services.local_service import LocalService
def create_service():
return ProxyServerService()
class ProxyServerService(LocalService):
service_name = 'service_proxy_server'
config_path = 'services/proxy-server/enabled'
# def init(self):
# self.debug_level = 2
# self.log_events = True
def dependent_on(self):
return ['service_p2p_hookups',
]
def enabled(self):
from main import settings
return settings.enableProxyServer()
def start(self):
from transport.proxy import proxy_router
proxy_router.A('init')
proxy_router.A('start')
return True
def stop(self):
from transport.proxy import proxy_router
proxy_router.A('stop')
proxy_router.A('shutdown')
return True
def request(self, json_payload, newpacket, info):
from transport.proxy import proxy_router
proxy_router.A('request-route-received', (json_payload, newpacket, info, ))
return True
def cancel(self, json_payload, newpacket, info):
from transport.proxy import proxy_router
proxy_router.A('cancel-route-received', (json_payload, newpacket, info, ))
return True
| vesellov/bitdust.devel | services/service_proxy_server.py | Python | agpl-3.0 | 2,236 |
# -*- coding: utf-8 -*-
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
try:
from .logger import Logger as _Logger
from .logger import deprecated, deprecated_argument
except Exception:
from logger import Logger as _Logger
from logger import deprecated, deprecated_argument
from gc import collect as _gccollect
import socket as _socket
import threading as _threading
from time import sleep as _sleep
from traceback import print_exc as _print_exc
__author__ = "Sergi Blanch-Torné"
__email__ = "[email protected]"
__copyright__ = "Copyright 2015, CELLS / ALBA Synchrotron"
__license__ = "GPLv3+"
_MAX_CLIENTS = 10
def splitter(data, sep='\r\n'):
"""
    Split the incoming string to separate piled-up requests, if any.
    Do not confuse this with multiple commands in one request: the
    multiple-command separator is ';', while this split only acts on
    '\r', '\n', or their pairs.
    The separator is trimmed from each request in the returned list.
    If data does not end in either '\r' or '\n', the remaining buffer
    is returned as unprocessed data.
Examples::
>>> splitter(b'foo 1\rbar 2\n')
[b'foo 1', b'bar 2'], b''
>>> splitter(b'foo 1\rbar 2\nnext...')
[b'foo 1', b'bar 2'], b'next...'
:param data: data to separate
:return: answer: tuple of <list of requests>, <remaining characters>
"""
data = data.strip(' \t')
if not data:
return [], ''
for c in sep[1:]:
data = data.replace(c, sep[0])
result = (line.strip() for line in data.split(sep[0]))
result = [req for req in result if req]
has_remaining = data[-1] not in sep
remaining = result.pop(-1) if has_remaining else b''
return result, remaining
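# Illustrative behaviour of splitter() (a hedged sketch, consistent with the
# docstring examples above): ';'-separated commands stay inside one request,
# and only '\r'/'\n' terminators split piled-up requests, e.g.
#   >>> splitter(b'MEAS:VOLT?;MEAS:CURR?\n*IDN?\r')
#   ([b'MEAS:VOLT?;MEAS:CURR?', b'*IDN?'], b'')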
class TcpListener(_Logger):
"""
    TCP listener that accepts client connections over IPv4 (and IPv6 when
    available), reads line-oriented requests and hands each one to the
    configured callback, writing the callback's answer back to the client.
"""
# FIXME: default should be local=False
_callback = None
_connection_hooks = None
_max_clients = None
_join_event = None
_local = None
_port = None
_host_ipv4 = None
_listener_ipv4 = None
_socket_ipv4 = None
_with_ipv6_support = None
_host_ipv6 = None
_listener_ipv6 = None
_socket_ipv6 = None
def __init__(self, name=None, callback=None, local=True, port=5025,
max_clients=None, ipv6=True,
maxClients=None,
*args, **kwargs):
super(TcpListener, self).__init__(*args, **kwargs)
if maxClients is not None:
deprecated_argument("TcpListener", "__init__", "maxClients")
if max_clients is None:
max_clients = maxClients
if max_clients is None:
max_clients = _MAX_CLIENTS
self._name = name or "TcpListener"
self._callback = callback
self._connection_hooks = []
self._local = local
self._port = port
self._max_clients = max_clients
self._join_event = _threading.Event()
self._join_event.clear()
self._connection_threads = {}
self._with_ipv6_support = ipv6
self.open()
self._debug("Listener thread prepared")
def __enter__(self):
self._debug("received a enter() request")
if not self.isOpen:
self.open()
return self
def __exit__(self, type, value, traceback):
self._debug("received a exit({0},{1},{2}) request",
type, value, traceback)
self.__del__()
def __del__(self):
self.close()
def open(self):
self.build_ipv4_socket()
try:
self.build_ipv6_socket()
except Exception as exc:
self._error("IPv6 will not be available due to: {0}", exc)
def close(self):
if self._join_event.isSet():
return
self._debug("{0} close received", self._name)
if hasattr(self, '_join_event'):
self._debug("Deleting TcpListener")
self._join_event.set()
self._shutdown_socket(self._socket_ipv4)
if self._with_ipv6_support and hasattr(self, '_socket_ipv6'):
self._shutdown_socket(self._socket_ipv6)
if self._is_listening_ipv4():
self._socket_ipv4 = None
if self._with_ipv6_support and self._is_listening_ipv6():
self._socket_ipv6 = None
_gccollect()
while self.is_alive():
_gccollect()
self._debug("Waiting for Listener threads")
_sleep(1)
self._debug("Everything is close, exiting...")
@property
def port(self):
return self._port
@property
def local(self):
return self._local
def listen(self):
self._debug("Launching listener thread")
self._listener_ipv4.start()
if hasattr(self, '_listener_ipv6'):
self._listener_ipv6.start()
def is_alive(self):
return self._is_ipv4_listener_alive() or self._is_ipv6_listener_alive()
@deprecated
def isAlive(self):
return self.is_alive()
def is_listening(self):
return self._is_listening_ipv4() or self._is_listening_ipv6()
@deprecated
def isListening(self):
return self.is_listening()
def build_ipv4_socket(self):
if self._local:
self._host_ipv4 = '127.0.0.1'
else:
self._host_ipv4 = '0.0.0.0'
self._socket_ipv4 = _socket.socket(
_socket.AF_INET, _socket.SOCK_STREAM)
self._socket_ipv4.setsockopt(
_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1)
self._listener_ipv4 = _threading.Thread(name="Listener4",
target=self.__listener,
args=(self._socket_ipv4,
self._host_ipv4,))
self._listener_ipv4.setDaemon(True)
@deprecated
def buildIpv4Socket(self):
return self.build_ipv4_socket()
def build_ipv6_socket(self):
if self._with_ipv6_support:
if not _socket.has_ipv6:
raise AssertionError("IPv6 not supported by the platform")
if self._local:
self._host_ipv6 = '::1'
else:
self._host_ipv6 = '::'
self._socket_ipv6 = _socket.socket(_socket.AF_INET6,
_socket.SOCK_STREAM)
self._socket_ipv6.setsockopt(_socket.IPPROTO_IPV6,
_socket.IPV6_V6ONLY, True)
self._socket_ipv6.setsockopt(_socket.SOL_SOCKET,
_socket.SO_REUSEADDR, 1)
self._listener_ipv6 = _threading.Thread(name="Listener6",
target=self.__listener,
args=(self._socket_ipv6,
self._host_ipv6,))
self._listener_ipv6.setDaemon(True)
@deprecated
def buildIpv6Socket(self):
return self.build_ipv6_socket()
@staticmethod
def _shutdown_socket(sock):
try:
sock.shutdown(_socket.SHUT_RDWR)
except Exception as e:
_print_exc()
def _is_ipv4_listener_alive(self):
return self._listener_ipv4.is_alive()
def _is_ipv6_listener_alive(self):
if hasattr(self, '_listener_ipv6'):
return self._listener_ipv6.is_alive()
return False
def __listener(self, scpisocket, scpihost):
try:
self.__prepare_listener(scpisocket, scpihost, 5)
self.__do_listen(scpisocket)
self._debug("Listener thread finishing")
except SystemExit as exc:
self._debug("Received a SystemExit ({0})", exc)
self.__del__()
except KeyboardInterrupt as exc:
self._debug("Received a KeyboardInterrupt ({0})", exc)
self.__del__()
except GeneratorExit as exc:
self._debug("Received a GeneratorExit ({0})", exc)
self.__del__()
def __prepare_listener(self, scpisocket, scpihost, maxretries):
listening = False
tries = 0
seconds = 3
while tries < maxretries:
try:
scpisocket.bind((scpihost, self._port))
scpisocket.listen(self._max_clients)
self._debug("Listener thread up and running (port {0:d}, with "
"a maximum of {1:d} connections in parallel).",
self._port, self._max_clients)
return True
except Exception as exc:
tries += 1
self._error("Couldn't bind the socket. {0}\nException: {1}",
"(Retry in {0:d} seconds)".format(seconds)
if tries < maxretries else "(No more retries)",
exc)
_sleep(seconds)
return False
def __do_listen(self, scpisocket):
while not self._join_event.isSet():
try:
connection, address = scpisocket.accept()
except Exception as e:
if self._join_event.isSet():
self._debug("Closing Listener")
del scpisocket
return
# self._error("Socket Accept Exception: %s" % (e))
_sleep(3)
else:
self.__launch_connection(address, connection)
scpisocket.close()
def _is_listening_ipv4(self):
if hasattr(self, '_socket_ipv4') and \
hasattr(self._socket_ipv4, 'fileno'):
return bool(self._socket_ipv4.fileno())
return False
def _is_listening_ipv6(self):
if hasattr(self, '_socket_ipv6') and \
hasattr(self._socket_ipv6, 'fileno'):
return bool(self._socket_ipv6.fileno())
return False
@property
def active_connections(self):
return len(self._connection_threads)
def __launch_connection(self, address, connection):
connectionName = "{0}:{1}".format(address[0], address[1])
try:
self._debug("Connection request from {0} "
"(having {1:d} already active)",
connectionName, self.active_connections)
if connectionName in self._connection_threads and \
                    self._connection_threads[connectionName].is_alive():
self.error("New connection from {0} when it has already "
"one. refusing the newer.", connectionName)
elif self.active_connections >= self._max_clients:
self._error("Reached the maximum number of allowed "
"connections ({0:d})", self.active_connections)
else:
self._connection_threads[connectionName] = \
_threading.Thread(name=connectionName,
target=self.__connection,
args=(address, connection))
self._debug("Connection for {0} created", connectionName)
self._connection_threads[connectionName].setDaemon(True)
self._connection_threads[connectionName].start()
except Exception as exc:
self._error("Cannot launch connection request from {0} due to: "
"{1}", connectionName, exc)
def __connection(self, address, connection):
connectionName = "{0}:{1}".format(address[0], address[1])
self._debug("Thread for {0} connection", connectionName)
stream = connection.makefile('rwb', bufsize=0)
remaining = b''
while not self._join_event.isSet():
data = stream.readline() # data = connection.recv(4096)
self._info("received from {0}: {1:d} bytes {2!r}",
connectionName, len(data), data)
if len(self._connection_hooks) > 0:
for hook in self._connection_hooks:
try:
hook(connectionName, data)
except Exception as exc:
self._warning("Exception calling {0} hook: {1}",
hook, exc)
data = remaining + data
if len(data) == 0:
self._warning("No data received, termination the connection")
stream.close()
connection.close()
break
if self._callback is not None:
lines, remaining = splitter(data)
for line in lines:
ans = self._callback(line)
self._debug("scpi.input say {0!r}", ans)
stream.write(ans) # connection.send(ans)
else:
remaining = b''
stream.close()
self._connection_threads.pop(connectionName)
self._debug("Ending connection: {0} (having {1} active left)",
connectionName, self.active_connections)
def add_connection_hook(self, hook):
if callable(hook):
self._connection_hooks.append(hook)
else:
raise TypeError("The hook must be a callable object")
@deprecated
def addConnectionHook(self, *args):
return self.add_connection_hook(*args)
def remove_connection_hook(self, hook):
if self._connection_hooks.count(hook):
self._connection_hooks.pop(self._connection_hooks.index(hook))
return True
return False
@deprecated
def removeConnectionHook(self, *args):
return self.remove_connection_hook(*args)
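# A minimal usage sketch (hypothetical, not part of the original module): wire
# a callback that answers each received line and serve until shutdown.
#
#   def answer(line):
#       return line + b'\r\n'     # echo back whatever was requested
#
#   listener = TcpListener(name='demo', callback=answer, local=True, port=5025)
#   listener.listen()
#   # ... keep the process alive while clients are served ...
#   listener.close()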
| srgblnch/python-scpilib | scpilib/tcpListener.py | Python | gpl-3.0 | 14,483 |
# coding=utf-8
"""
Collect stats via MX4J from Kafka
#### Dependencies
* urllib2
* xml.etree
"""
import urllib2
from urllib import urlencode
try:
from xml.etree import ElementTree
except ImportError:
ElementTree = None
try:
from ElementTree import ParseError as ETParseError
except ImportError:
ETParseError = Exception
import diamond.collector
class KafkaCollector(diamond.collector.Collector):
ATTRIBUTE_TYPES = {
'double': float,
'float': float,
'int': int,
'java.lang.Object': float,
'long': long,
}
def get_default_config_help(self):
config_help = super(KafkaCollector, self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(KafkaCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 8082,
'path': 'kafka',
})
return config
def _get(self, path, query_args=None):
if not path.startswith('/'):
path = '/' + path
qargs = {'template': 'identity'}
if query_args:
qargs.update(query_args)
url = 'http://%s:%i%s?%s' % (
self.config['host'], int(self.config['port']),
path, urlencode(qargs))
try:
response = urllib2.urlopen(url)
except urllib2.URLError, err:
self.log.error("%s: %s", url, err)
return None
try:
return ElementTree.fromstring(response.read())
except ETParseError:
self.log.error("Unable to parse response from mx4j")
return None
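    # Illustrative only (not from the original collector): with the default
    # config this queries the mx4j HTTP adaptor with URLs of roughly the form
    #   http://127.0.0.1:8082/serverbydomain?querynames=<pattern>&template=identity
    # and parses the XML document it returns.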
def get_mbeans(self, pattern):
query_args = {'querynames': pattern}
mbeans = self._get('/serverbydomain', query_args)
if mbeans is None:
return
found_beans = set()
for mbean in mbeans.getiterator(tag='MBean'):
objectname = mbean.get('objectname')
if objectname:
found_beans.add(objectname)
return found_beans
def query_mbean(self, objectname, key_prefix=None):
query_args = {
'objectname': objectname,
'operations': False,
'constructors': False,
'notifications': False,
}
attributes = self._get('/mbean', query_args)
if attributes is None:
return
if key_prefix is None:
            # There could be one or two '=' signs in the objectname, e.g.:
# java.lang:type=Threading
# "kafka.controller":type="ControllerStats",
# name="LeaderElectionRateAndTimeMs"
split_num = objectname.count('=')
for i in range(split_num):
if i == 0:
key_prefix = objectname.split('=')[1]
if '"' in key_prefix:
key_prefix = key_prefix.split('"')[1]
if "," in key_prefix:
key_prefix = key_prefix.split(',')[0]
elif i > 0:
key = objectname.split('=')[i + 1]
if key:
if '"' in key:
key = key.split('"')[1]
key_prefix = key_prefix + '.' + key
key_prefix = key_prefix.replace(",", ".")
metrics = {}
for attrib in attributes.getiterator(tag='Attribute'):
atype = attrib.get('type')
ptype = self.ATTRIBUTE_TYPES.get(atype)
if not ptype:
continue
try:
value = ptype(attrib.get('value'))
except ValueError:
                # Logging this every time would be too noisy, so keep it at debug
self.log.debug('Unable to parse the value for ' +
atype + " in " + objectname)
continue
name = '.'.join([key_prefix, attrib.get('name')])
# Some prefixes and attributes could have spaces, thus we must
# sanitize them
name = name.replace(' ', '')
metrics[name] = value
return metrics
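    # A hypothetical illustration of the prefix derivation above (not from the
    # original source): an objectname such as
    #   '"kafka.server":type="BrokerTopicMetrics",name="MessagesInPerSec"'
    # yields key_prefix 'BrokerTopicMetrics.MessagesInPerSec', so its 'Count'
    # attribute is published as 'BrokerTopicMetrics.MessagesInPerSec.Count'.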
def collect(self):
if ElementTree is None:
self.log.error('Failed to import xml.etree.ElementTree')
return
# Get list of gatherable stats
query_list = [
'*kafka*:*',
'java.lang:type=GarbageCollector,name=*',
'java.lang:type=Threading'
]
mbeans = set()
for pattern in query_list:
match = self.get_mbeans(pattern)
mbeans.update(match)
metrics = {}
# Query each one for stats
for mbean in mbeans:
if mbean is None:
continue
stats = self.query_mbean(mbean)
            if stats is None:
                self.log.error('Failed to get stats for ' + mbean)
                continue
            metrics.update(stats)
# Publish stats
for metric, value in metrics.iteritems():
self.publish(metric, value)
| skbkontur/Diamond | src/collectors/kafkastat/kafkastat.py | Python | mit | 5,231 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from hwt.interfaces.std import VectSignal
from hwt.interfaces.utils import addClkRstn
from hwt.simulator.simTestCase import SimTestCase
from hwt.synthesizer.unit import Unit
from hwtHls.hlsStreamProc.streamProc import HlsStreamProc
from hwtHls.platform.virtual import VirtualHlsPlatform
from hwtHls.scheduler.errors import TimeConstraintError
from hwtSimApi.utils import freq_to_period
class AlapAsapDiffExample(Unit):
def _config(self):
self.CLK_FREQ = int(400e6)
def _declr(self):
addClkRstn(self)
self.clk.FREQ = self.CLK_FREQ
self.a = VectSignal(8)
self.b = VectSignal(8)
self.c = VectSignal(8)
self.d = VectSignal(8)._m()
def _impl(self):
hls = HlsStreamProc(self)
        # inputs have to be read to enter the hls scope
        # (without read() the operations will not be scheduled by HLS
        # but will be directly synthesized)
a, b, c = [hls.read(intf) for intf in [self.a, self.b, self.c]]
        # depending on the target platform this expression
        # can be mapped to DSP, LUT, etc...
        # no constraints are specified => the default strategy is
        # to achieve zero delay and minimum latency for this CLK_FREQ
d = ~(~a & ~b) & ~c
hls.thread(
hls.While(True,
hls.write(d, self.d)
)
)
def neg_8b(a):
return ~a & 0xff
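# Illustrative note (not part of the original test): by De Morgan's law the
# expression computed in AlapAsapDiffExample, ~(~a & ~b) & ~c, equals
# (a | b) & ~c. For the test vector a=20, b=58, c=48 used below this is
# (20 | 58) & (~48 & 0xff) == 62 & 0xcf == 14, which is the value the
# simulation checks via neg_8b().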
class AlapAsapDiffExample_TC(SimTestCase):
def test_400MHz(self):
self._test_simple(400e6)
def test_200MHz(self):
self._test_simple(200e6)
def test_1Hz(self):
self._test_simple(1)
def test_1GHz_fail(self):
with self.assertRaises(TimeConstraintError):
self._test_simple(1e9)
def _test_simple(self, freq):
u = AlapAsapDiffExample()
u.CLK_FREQ = int(freq)
a = 20
b = 58
c = 48
self.compileSimAndStart(u, target_platform=VirtualHlsPlatform())
u.a._ag.data.append(a)
u.b._ag.data.append(b)
u.c._ag.data.append(c)
self.runSim(int(40 * freq_to_period(u.CLK_FREQ)))
res = u.d._ag.data[-1]
self.assertValEqual(res, neg_8b(neg_8b(a) & neg_8b(b)) & neg_8b(c))
if __name__ == "__main__":
import unittest
from hwt.synthesizer.utils import to_rtl_str
u = AlapAsapDiffExample()
from hwtHls.platform.virtual import makeDebugPasses
print(to_rtl_str(u, target_platform=VirtualHlsPlatform(**makeDebugPasses("tmp"))))
#suite = unittest.TestSuite()
## suite.addTest(FrameTmplTC('test_frameHeader'))
#suite.addTest(unittest.makeSuite(AlapAsapDiffExample_TC))
#runner = unittest.TextTestRunner(verbosity=3)
#runner.run(suite)
| Nic30/hwtHls | tests/utils/alapAsapDiffExample.py | Python | mit | 2,761 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Project.version_privacy_level'
db.add_column('projects_project', 'version_privacy_level',
self.gf('django.db.models.fields.CharField')(default='public', max_length=20),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Project.version_privacy_level'
db.delete_column('projects_project', 'version_privacy_level')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 13, 23, 55, 17, 885486)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 13, 23, 55, 17, 885212)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'projects.importedfile': {
'Meta': {'object_name': 'ImportedFile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'imported_files'", 'to': "orm['projects.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'projects.project': {
'Meta': {'ordering': "('slug',)", 'object_name': 'Project'},
'analytics_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'conf_py_file': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'copyright': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'crate_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'default_branch': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_version': ('django.db.models.fields.CharField', [], {'default': "'latest'", 'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'django_packages_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'documentation_type': ('django.db.models.fields.CharField', [], {'default': "'sphinx'", 'max_length': '20'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
'project_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'related_projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['projects.Project']", 'null': 'True', 'through': "orm['projects.ProjectRelationship']", 'blank': 'True'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'repo_type': ('django.db.models.fields.CharField', [], {'default': "'git'", 'max_length': '10'}),
'requirements_file': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'skip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "'.rst'", 'max_length': '10'}),
'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '20'}),
'use_system_packages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'use_virtualenv': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'version_privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'})
},
'projects.projectrelationship': {
'Meta': {'object_name': 'ProjectRelationship'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'superprojects'", 'to': "orm['projects.Project']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subprojects'", 'to': "orm['projects.Project']"})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['projects']
| d0ugal/readthedocs.org | readthedocs/projects/migrations/0028_add_version_default_privacy.py | Python | mit | 9,508 |
# -*- coding: UTF-8 -*-
# Copyright 2019 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from lino.api import dd, rt, _
from lino.utils.mldbc import babeld
from lino.utils import Cycler
def objects():
List = rt.models.lists.List
Member = rt.models.lists.Member
Partner = rt.models.contacts.Partner
LISTS = Cycler(List.objects.order_by('id'))
for p in dd.plugins.lists.partner_model.objects.order_by('id'):
yield Member(partner=p, list=LISTS.pop())
| lino-framework/xl | lino_xl/lib/lists/fixtures/demo2.py | Python | bsd-2-clause | 527 |
"""
Masking HTTP Errors from Django REST Framework
"""
class HTTP_400_BAD_REQUEST(Exception):
"""400 - Bad Request:
The request was invalid. This response code is common when required
fields are unspecified, formatted incorrectly,
or invalid filters are requested.
"""
pass
class HTTP_401_UNAUTHORIZED(Exception):
"""401 - Unauthorized:
The request authentication failed. The OAuth credentials that
the client supplied were missing or invalid.
"""
pass
class HTTP_403_FORBIDDEN(Exception):
"""403 - Forbidden:
The request credentials authenticated, but the requesting
user or client app is not authorized to access the given resource.
"""
pass
class HTTP_404_NOT_FOUND(Exception):
"""404 - Not Found:
The requested resource does not exist.
"""
pass
class HTTP_405_METHOD_NOT_ALLOWED(Exception):
"""405 - Method Not Allowed:
The requested HTTP method is invalid for the given resource.
Review the resource documentation for supported methods.
"""
pass
class HTTP_500_INTERNAL_SERVER_ERROR(Exception):
"""500 - Server Error:
The server failed to fulfill the request.
Please notify support with details of the request
and response so that we can fix the problem.
"""
pass
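# A hedged usage sketch (hypothetical, not part of the original module):
# dispatching a response status code to one of the exception classes above.
#
#   _STATUS_EXCEPTIONS = {
#       400: HTTP_400_BAD_REQUEST,
#       401: HTTP_401_UNAUTHORIZED,
#       403: HTTP_403_FORBIDDEN,
#       404: HTTP_404_NOT_FOUND,
#       405: HTTP_405_METHOD_NOT_ALLOWED,
#       500: HTTP_500_INTERNAL_SERVER_ERROR,
#   }
#
#   def raise_for_status(status_code, detail=''):
#       exc = _STATUS_EXCEPTIONS.get(status_code)
#       if exc is not None:
#           raise exc(detail)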
| igorfala/python-under-armour | UnderArmour/Exceptions.py | Python | mit | 1,304 |
class A:
def __init__(self):
self._x = 1
def _foo(self):
pass
| smmribeiro/intellij-community | python/testData/inspections/PyProtectedMemberInspection/ClassInAnotherModule/m1.py | Python | apache-2.0 | 87 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Special Math Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
__all__ = [
"ndtr",
"ndtri",
"log_ndtr",
"log_cdf_laplace",
]
# log_ndtr uses different functions over the ranges
# (-infty, lower](lower, upper](upper, infty)
# Lower bound values were chosen by examining where the support of ndtr
# appears to be zero, relative to scipy's (which is always 64bit). They were
# then made more conservative just to be safe. (Conservative means use the
# expansion more than we probably need to.) See `NdtrTest` in
# special_math_test.py.
LOGNDTR_FLOAT64_LOWER = -20
LOGNDTR_FLOAT32_LOWER = -10
# Upper bound values were chosen by examining for which values of 'x'
# Log[cdf(x)] is 0, after which point we need to use the approximation
# Log[cdf(x)] = Log[1 - cdf(-x)] approx -cdf(-x). We chose a value slightly
# conservative, meaning we use the approximation earlier than needed.
LOGNDTR_FLOAT64_UPPER = 8
LOGNDTR_FLOAT32_UPPER = 5
def ndtr(x, name="ndtr"):
"""Normal distribution function.
Returns the area under the Gaussian probability density function, integrated
from minus infinity to x:
```
1 / x
ndtr(x) = ---------- | exp(-0.5 t**2) dt
sqrt(2 pi) /-inf
= 0.5 (1 + erf(x / sqrt(2)))
= 0.5 erfc(x / sqrt(2))
```
Args:
x: `Tensor` of type `float32`, `float64`.
name: Python string. A name for the operation (default="ndtr").
Returns:
ndtr: `Tensor` with `dtype=x.dtype`.
Raises:
TypeError: if `x` is not floating-type.
"""
with ops.name_scope(name, values=[x]):
x = ops.convert_to_tensor(x, name="x")
if x.dtype.as_numpy_dtype not in [np.float32, np.float64]:
raise TypeError(
"x.dtype=%s is not handled, see docstring for supported types."
% x.dtype)
return _ndtr(x)
def _ndtr(x):
"""Implements ndtr core logic."""
half_sqrt_2 = constant_op.constant(
0.5 * math.sqrt(2.), dtype=x.dtype, name="half_sqrt_2")
w = x * half_sqrt_2
z = math_ops.abs(w)
y = array_ops.where(math_ops.less(z, half_sqrt_2),
1. + math_ops.erf(w),
array_ops.where(math_ops.greater(w, 0.),
2. - math_ops.erfc(z),
math_ops.erfc(z)))
return 0.5 * y
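# A hedged sanity-check sketch (not part of the original module): values of
# `ndtr` can be compared against scipy inside a session, e.g.
#
#   import numpy as np
#   from scipy import special
#   x = np.linspace(-5., 5., 11).astype(np.float64)
#   np.testing.assert_allclose(sess.run(ndtr(x)), special.ndtr(x), rtol=1e-10)
#
# where `sess` is assumed to be an active tf.Session.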
def ndtri(p, name="ndtri"):
"""The inverse of the CDF of the Normal distribution function.
Returns x such that the area under the pdf from minus infinity to x is equal
to p.
A piece-wise rational approximation is done for the function.
This is a port of the implementation in netlib.
Args:
p: `Tensor` of type `float32`, `float64`.
name: Python string. A name for the operation (default="ndtri").
Returns:
x: `Tensor` with `dtype=p.dtype`.
Raises:
TypeError: if `p` is not floating-type.
"""
with ops.name_scope(name, values=[p]):
p = ops.convert_to_tensor(p, name="p")
if p.dtype.as_numpy_dtype not in [np.float32, np.float64]:
raise TypeError(
"p.dtype=%s is not handled, see docstring for supported types."
% p.dtype)
return _ndtri(p)
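# Illustrative property (not from the original source): `ndtri` inverts `ndtr`,
# so away from the extreme tails ndtri(ndtr(x)) ~= x; in particular
# ndtr(0.) = 0.5 and ndtri(0.5) = 0.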
def _ndtri(p):
"""Implements ndtri core logic."""
# Constants used in piece-wise rational approximations. Taken from the cephes
# library:
# https://github.com/scipy/scipy/blob/master/scipy/special/cephes/ndtri.c
p0 = list(reversed([-5.99633501014107895267E1,
9.80010754185999661536E1,
-5.66762857469070293439E1,
1.39312609387279679503E1,
-1.23916583867381258016E0]))
q0 = list(reversed([1.0,
1.95448858338141759834E0,
4.67627912898881538453E0,
8.63602421390890590575E1,
-2.25462687854119370527E2,
2.00260212380060660359E2,
-8.20372256168333339912E1,
1.59056225126211695515E1,
-1.18331621121330003142E0]))
p1 = list(reversed([4.05544892305962419923E0,
3.15251094599893866154E1,
5.71628192246421288162E1,
4.40805073893200834700E1,
1.46849561928858024014E1,
2.18663306850790267539E0,
-1.40256079171354495875E-1,
-3.50424626827848203418E-2,
-8.57456785154685413611E-4]))
q1 = list(reversed([1.0,
1.57799883256466749731E1,
4.53907635128879210584E1,
4.13172038254672030440E1,
1.50425385692907503408E1,
2.50464946208309415979E0,
-1.42182922854787788574E-1,
-3.80806407691578277194E-2,
-9.33259480895457427372E-4]))
p2 = list(reversed([3.23774891776946035970E0,
6.91522889068984211695E0,
3.93881025292474443415E0,
1.33303460815807542389E0,
2.01485389549179081538E-1,
1.23716634817820021358E-2,
3.01581553508235416007E-4,
2.65806974686737550832E-6,
6.23974539184983293730E-9]))
q2 = list(reversed([1.0,
6.02427039364742014255E0,
3.67983563856160859403E0,
1.37702099489081330271E0,
2.16236993594496635890E-1,
1.34204006088543189037E-2,
3.28014464682127739104E-4,
2.89247864745380683936E-6,
6.79019408009981274425E-9]))
def _create_polynomial(var, coeffs):
"""Compute n_th order polynomial via Horner's method."""
if not coeffs:
return 0.
return coeffs[0] + _create_polynomial(var, coeffs[1:]) * var
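  # For example (illustrative only): coeffs == [c0, c1, c2] evaluates as
  # c0 + var * (c1 + var * c2), i.e. the Horner form of c0 + c1*var + c2*var**2.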
maybe_complement_p = array_ops.where(p > 1. - np.exp(-2.), 1. - p, p)
# Write in an arbitrary value in place of 0 for p since 0 will cause NaNs
# later on. The result from the computation when p == 0 is not used so any
# number that doesn't result in NaNs is fine.
sanitized_mcp = array_ops.where(
maybe_complement_p <= 0.,
0.5 * array_ops.ones_like(p),
maybe_complement_p)
# Compute x for p > exp(-2): x/sqrt(2pi) = w + w**3 P0(w**2)/Q0(w**2).
w = sanitized_mcp - 0.5
ww = w ** 2
x_for_big_p = w + w * ww * (_create_polynomial(ww, p0)
/ _create_polynomial(ww, q0))
x_for_big_p *= -np.sqrt(2. * np.pi)
# Compute x for p <= exp(-2): x = z - log(z)/z - (1/z) P(1/z) / Q(1/z),
# where z = sqrt(-2. * log(p)), and P/Q are chosen between two different
  # arrays based on whether p < exp(-32).
z = math_ops.sqrt(-2. * math_ops.log(sanitized_mcp))
first_term = z - math_ops.log(z) / z
second_term_small_p = (_create_polynomial(1. / z, p2)
/ _create_polynomial(1. / z, q2)) / z
second_term_otherwise = (_create_polynomial(1. / z, p1)
/ _create_polynomial(1. / z, q1)) / z
x_for_small_p = first_term - second_term_small_p
x_otherwise = first_term - second_term_otherwise
x = array_ops.where(sanitized_mcp > np.exp(-2.),
x_for_big_p,
array_ops.where(z >= 8.0, x_for_small_p, x_otherwise))
x = array_ops.where(p > 1. - np.exp(-2.), x, -x)
infinity = constant_op.constant(np.inf, dtype=x.dtype) * array_ops.ones_like(x)
x_nan_replaced = array_ops.where(
p <= 0.0, -infinity, array_ops.where(p >= 1.0, infinity, x))
return x_nan_replaced
def log_ndtr(x, series_order=3, name="log_ndtr"):
"""Log Normal distribution function.
For details of the Normal distribution function see `ndtr`.
This function calculates `(log o ndtr)(x)` by either calling `log(ndtr(x))` or
using an asymptotic series. Specifically:
- For `x > upper_segment`, use the approximation `-ndtr(-x)` based on
`log(1-x) ~= -x, x << 1`.
- For `lower_segment < x <= upper_segment`, use the existing `ndtr` technique
and take a log.
- For `x <= lower_segment`, we use the series approximation of erf to compute
the log CDF directly.
The `lower_segment` is set based on the precision of the input:
```
lower_segment = { -20, x.dtype=float64
{ -10, x.dtype=float32
upper_segment = { 8, x.dtype=float64
{ 5, x.dtype=float32
```
When `x < lower_segment`, the `ndtr` asymptotic series approximation is:
```
ndtr(x) = scale * (1 + sum) + R_N
scale = exp(-0.5 x**2) / (-x sqrt(2 pi))
sum = Sum{(-1)^n (2n-1)!! / (x**2)^n, n=1:N}
R_N = O(exp(-0.5 x**2) (2N+1)!! / |x|^{2N+3})
```
where `(2n-1)!! = (2n-1) (2n-3) (2n-5) ... (3) (1)` is a
[double-factorial](https://en.wikipedia.org/wiki/Double_factorial).
Args:
x: `Tensor` of type `float32`, `float64`.
series_order: Positive Python `integer`. Maximum depth to
evaluate the asymptotic expansion. This is the `N` above.
name: Python string. A name for the operation (default="log_ndtr").
Returns:
log_ndtr: `Tensor` with `dtype=x.dtype`.
Raises:
TypeError: if `x.dtype` is not handled.
    TypeError: if `series_order` is not a Python `integer`.
ValueError: if `series_order` is not in `[0, 30]`.
"""
if not isinstance(series_order, int):
raise TypeError("series_order must be a Python integer.")
if series_order < 0:
raise ValueError("series_order must be non-negative.")
if series_order > 30:
raise ValueError("series_order must be <= 30.")
with ops.name_scope(name, values=[x]):
x = ops.convert_to_tensor(x, name="x")
if x.dtype.as_numpy_dtype == np.float64:
lower_segment = LOGNDTR_FLOAT64_LOWER
upper_segment = LOGNDTR_FLOAT64_UPPER
elif x.dtype.as_numpy_dtype == np.float32:
lower_segment = LOGNDTR_FLOAT32_LOWER
upper_segment = LOGNDTR_FLOAT32_UPPER
else:
raise TypeError("x.dtype=%s is not supported." % x.dtype)
# The basic idea here was ported from py/scipy/special/cephes/ndtr.c.
# We copy the main idea, with a few changes
# * For x >> 1, and X ~ Normal(0, 1),
# Log[P[X < x]] = Log[1 - P[X < -x]] approx -P[X < -x],
# which extends the range of validity of this function.
# * We use one fixed series_order for all of 'x', rather than adaptive.
# * Our docstring properly reflects that this is an asymptotic series, not a
# Taylor series. We also provided a correct bound on the remainder.
# * We need to use the max/min in the _log_ndtr_lower arg to avoid nan when
# x=0. This happens even though the branch is unchosen because when x=0
# the gradient of a select involves the calculation 1*dy+0*(-inf)=nan
# regardless of whether dy is finite. Note that the minimum is a NOP if
# the branch is chosen.
return array_ops.where(
math_ops.greater(x, upper_segment),
-_ndtr(-x), # log(1-x) ~= -x, x << 1
array_ops.where(math_ops.greater(x, lower_segment),
math_ops.log(_ndtr(math_ops.maximum(x, lower_segment))),
_log_ndtr_lower(math_ops.minimum(x, lower_segment),
series_order)))
def _log_ndtr_lower(x, series_order):
"""Asymptotic expansion version of `Log[cdf(x)]`, appropriate for `x<<-1`."""
x_2 = math_ops.square(x)
# Log of the term multiplying (1 + sum)
log_scale = -0.5 * x_2 - math_ops.log(-x) - 0.5 * math.log(2. * math.pi)
return log_scale + math_ops.log(_log_ndtr_asymptotic_series(x, series_order))
def _log_ndtr_asymptotic_series(x, series_order):
"""Calculates the asymptotic series used in log_ndtr."""
if series_order <= 0:
return 1.
x_2 = math_ops.square(x)
even_sum = 0.
odd_sum = 0.
x_2n = x_2 # Start with x^{2*1} = x^{2*n} with n = 1.
for n in range(1, series_order + 1):
if n % 2:
odd_sum += _double_factorial(2 * n - 1) / x_2n
else:
even_sum += _double_factorial(2 * n - 1) / x_2n
x_2n *= x_2
return 1. + even_sum - odd_sum
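# Worked example: keeping separate even_sum and odd_sum and returning
# 1. + even_sum - odd_sum reproduces the alternating sign (-1)^n of the
# asymptotic series without raising -1 to a power each iteration; with
# series_order=2 the result is 1 - 1/x**2 + 3/x**4.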
def _double_factorial(n):
"""The double factorial function for small Python integer `n`."""
return np.prod(np.arange(n, 1, -2))
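# Example values: _double_factorial(7) == 7 * 5 * 3 == 105 (the trailing
# factor of 1 is implicit) and _double_factorial(1) == 1, since np.prod of an
# empty range is 1.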
def log_cdf_laplace(x, name="log_cdf_laplace"):
"""Log Laplace distribution function.
This function calculates `Log[L(x)]`, where `L(x)` is the cumulative
distribution function of the Laplace distribution, i.e.
```L(x) := 0.5 * int_{-infty}^x e^{-|t|} dt```
For numerical accuracy, `L(x)` is computed in different ways depending on `x`,
```
x <= 0:
Log[L(x)] = Log[0.5] + x, which is exact
0 < x:
Log[L(x)] = Log[1 - 0.5 * e^{-x}], which is exact
```
Args:
x: `Tensor` of type `float32`, `float64`.
    name: Python string. A name for the operation (default="log_cdf_laplace").
Returns:
`Tensor` with `dtype=x.dtype`.
Raises:
TypeError: if `x.dtype` is not handled.
"""
with ops.name_scope(name, values=[x]):
x = ops.convert_to_tensor(x, name="x")
# For x < 0, L(x) = 0.5 * exp{x} exactly, so Log[L(x)] = log(0.5) + x.
lower_solution = -np.log(2.) + x
# safe_exp_neg_x = exp{-x} for x > 0, but is
# bounded above by 1, which avoids
# log[1 - 1] = -inf for x = log(1/2), AND
# exp{-x} --> inf, for x << -1
safe_exp_neg_x = math_ops.exp(-math_ops.abs(x))
    # log1p(z) = log(1 + z) approx z for |z| << 1. This approximation is used
# internally by log1p, rather than being done explicitly here.
upper_solution = math_ops.log1p(-0.5 * safe_exp_neg_x)
return array_ops.where(x < 0., lower_solution, upper_solution)
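# Consistency check for the two branches above: at x == 0 both agree, since
# -log(2) + 0 == log1p(-0.5 * exp(-0)) == log(0.5); for large positive x,
# log1p(-0.5 * exp(-x)) ~= -0.5 * exp(-x), which tends to 0 == log(1) as the
# CDF approaches 1.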
| dyoung418/tensorflow | tensorflow/python/ops/distributions/special_math.py | Python | apache-2.0 | 14,730 |
from unittest import TestCase
from agent_finder import find_subclasses
import opencog.cogserver
import test_agent
class HelperTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_find_agents(self):
        x = find_subclasses(test_agent, opencog.cogserver.MindAgent)
self.assertEqual(len(x),1)
self.assertEqual(x[0][0], 'TestAgent')
| rkarlberg/opencog | tests/cython/test_agent_finder.py | Python | agpl-3.0 | 400 |
##########################################################################
# This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
from wtframework.wtf.testobjects.test_watchers import DelayedTestFailTestWatcher, \
CaptureScreenShotOnErrorTestWatcher
from wtframework.wtf.testobjects.testcase import WatchedTestCase
import inspect
class WTFBaseTest(WatchedTestCase):
'''
    Tests can extend this base test to add additional unit test functionality, such as
    taking a screenshot on failure.
Example::
from wtframework.wtf.testobjects.basetests import WTFBaseTest
from wtframework.wtf.web.webdriver import WTF_WEBDRIVER_MANAGER
class TestScreenCaptureOnFail(WTFBaseTest):
""""
These test cases are expected to fail. They are here to test
the screen capture on failure.
"""
# Comment out decorator to manually test the screen capture.
@unittest.expectedFailure
def test_fail(self):
driver = WTF_WEBDRIVER_MANAGER.new_driver()
driver.get('http://www.google.com')
self.fail()
#Check your /screenshots folder for a screenshot of Google Homepage.
For the screen capture to work, you need to make sure you use WTF_WEBDRIVER_MANAGER for
getting your webdriver instance. This is used for getting the current instance of webdriver
when a test fails in order to take a screenshot.
WTFBaseTest is also an instance of WatchedTestCase, which you can use to add additional
call backs you wish to use for handling errors or other test events.
'''
def __init__(self, methodName='runTest', webdriver_provider=None, screenshot_util=None):
"""
Constructor matches that of UnitTest2 test case, but modified to allow passing in
a ScreenShot utility and register delayed test watchers.
Kwargs:
methodName (str) : Test method name.
webdriver_provider (WebdriverManager) : Default webdriver provider.
screenshot_util (CaptureScreenShotOnErrorTestWatcher) : Screenshot capture utility.
"""
super(WTFBaseTest, self).__init__(methodName)
self._register_watcher(
CaptureScreenShotOnErrorTestWatcher(webdriver_provider, screenshot_util))
# Note this watcher should be registered after all other watchers that use
# on_test_passed() event.
self._delayed_test_watcher = DelayedTestFailTestWatcher()
self._register_watcher(self._delayed_test_watcher)
def assertWithDelayedFailure(self, assert_method, *args, **kwargs):
"""
Cause an assertion failure to be delayed till the end of the test.
This is good to use if you want the test to continue after an assertion
        fails, and do additional assertions. At the end of the test, it will
pool all the test failures into 1 failed assert with a summary of
all the test failures that occurred during the test.
Args:
assert_method (function) - Assert method to run.
args - arguments to pass into the assert method.
Kwargs:
kwargs - additional kwargs to pass into the assert method.
        Example (fails at the end of the test unless percent == 100)::
            self.assertWithDelayedFailure(self.assertEqual, 100, percent)
"""
frame = None
try:
# attempt to get parent frames
frame = inspect.getouterframes(inspect.currentframe())[1]
except:
pass # oh well, we couldn't get it.
assert_func = lambda: assert_method(*args, **kwargs)
generated_exception = self._delayed_test_watcher.delay_failure(
assert_func, frame)
if generated_exception:
# Call our on_fail for our test watchers. So we can trigger our screen
# capture at moment of failure.
for test_watcher in self.__wtf_test_watchers__:
test_watcher.on_test_failure(
self, self._resultForDoCleanups, generated_exception)
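    # A minimal usage sketch (hypothetical test class and values): each failed
    # delayed assertion is recorded, the registered watchers (e.g. screenshot
    # capture) fire at the moment of failure, and the test fails once at the
    # end with a summary of everything that went wrong.
    #
    #   class CheckoutTest(WTFBaseTest):
    #
    #       def test_totals(self):
    #           self.assertWithDelayedFailure(self.assertEqual, 100, 99)
    #           self.assertWithDelayedFailure(self.assertTrue, False)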
| wiredrive/wtframework | wtframework/wtf/testobjects/basetests.py | Python | gpl-3.0 | 4,855 |