text (string, 6-947k chars) | repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 distinct value) | license (15 distinct values) | size (int64, 6-947k) | score (float64, 0-0.34)
---|---|---|---|---|---|---
#!/usr/bin/python -Wall
# -*- coding: utf-8 -*-
"""
Number letter counts
Problem 17 (Published on Friday, 17th May 2002, 06:00 pm; Solved by 88413; Difficulty rating: 5%)
If the numbers 1 to 5 are written out in words: one, two, three, four, five,
then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
If all the numbers from 1 to 1000 (one thousand) inclusive were written out in
words, how many letters would be used?
NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20
letters. The use of "and" when writing out numbers is in compliance with
British usage.
"""
s={0:"",1:"one",2:"two",3:"three",4:"four",5:"five",6:"six",7:"seven",8:"eight",9:"nine",10:"ten",11:"eleven",12:"twelve",13:"thirteen",14:"fourteen",15:"fifteen",16:"sixteen",17:"seventeen",18:"eighteen",19:"nineteen",20:"twenty",30:"thirty",40:"forty",50:"fifty",60:"sixty",70:"seventy",80:"eighty",90:"ninety"}
for i in range(1,1000):
if(not i in s.keys()):
if(i<100):
s[i]=s[i/10*10]+s[i%10]
else:
s[i]=s[i/100]+"hundred"
if(i%100):
s[i]+="and"+s[i%100]
s[1000]="onethousand"
total = 0
for i in s.values():
total+=len(i)
print total
| beyoungwoo/C_glibc_Sample | _Algorithm/ProjectEuler_python/euler_17.py | Python | gpl-3.0 | 1,631 | 0.045984 |
import os
import numpy as np
import cv2
import json
start_index = 0
image_path = 'images/trial_3_autoencoder/'
test_path = 'test/trial_3_autoencoder/'
json_file = 'annotations/all_patient.json'
image_rows = 376
image_cols = 312
image_rows_map = 46
image_cols_map = 38
with open(json_file) as jf:
dict = json.load(jf)
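# gaussian_kernel builds an (h x w) unnormalized 2-D Gaussian bump centred on
# the patch; gen_kernel/gen_center_kernel below paste it around the annotated
# joint/centre pixel to turn a one-hot map into a soft score map.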
def gaussian_kernel(h, w, sigma_h, sigma_w):
yx = np.mgrid[-h//2:h//2,-w//2:w//2]**2
return np.exp(-yx[0,:,:] / sigma_h**2 - yx[1,:,:] / sigma_w**2)
def max(a,b):
return a if a>=b else b
def min(a,b):
return a if a<=b else b
def gen_kernel(score_map,img_info,h, w, sigma_h, sigma_w):
kernal = gaussian_kernel(h, w, sigma_h, sigma_w)
y, x = np.unravel_index(np.argmax(score_map), [len(score_map), len(score_map[0])])
score_map[max(y-h//2,0):min(y+h//2,img_info["img_height"]), max(x-w//2,0):min(x+w//2,img_info["img_width"])] \
= kernal[max(h//2-y,0):,max(w//2-x,0):]
# cv2.imshow('after',score_map)
# cv2.waitKey()
score_map = cv2.resize(score_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
# cv2.imshow('after',score_map)
# cv2.waitKey()
return score_map
def gen_center_kernel(center_map,img_info,h, w, sigma_h, sigma_w):
kernal = gaussian_kernel(h, w, sigma_h, sigma_w)
y, x = np.unravel_index(np.argmax(center_map), [len(center_map), len(center_map[0])])
center_map[max(y-h//2,0):min(y+h//2,img_info["img_height"]), max(x-w//2,0):min(x+w//2,img_info["img_width"])] \
= kernal[max(h//2-y,0):,max(w//2-x,0):]
center_map = cv2.resize(center_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
return center_map
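# Joint index remapping used when an image is flipped horizontally (presumably
# left/right counterparts): joints 2-4 swap with 5-7, 8-10 swap with 11-13,
# and 0-1 stay in place.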
flip_map = [0, 1, 5, 6, 7, 2, 3, 4, 11, 12, 13, 8, 9, 10]
def create_train_data():
print('Creating training original images...')
print('-'*30)
i = 0
path_depth = os.path.join(image_path, 'depth_vis')
path_ir = os.path.join(image_path, 'ir')
train_depth = os.listdir(path_depth)
total_imgs = len(train_depth)*2
depth_imgs = np.ndarray((total_imgs, image_rows, image_cols, 3), dtype=np.uint8)
ir_imgs = np.ndarray((total_imgs, image_rows, image_cols, 3), dtype=np.uint8)
centers = np.ndarray((total_imgs, 2), dtype=np.int16)
annotations = np.ndarray((total_imgs, 14, 2), dtype=np.int16)
for img_info in dict:
if(img_info["patient"] != "7"):
depth_img = cv2.imread(os.path.join(path_depth, img_info["image_name"]),cv2.IMREAD_UNCHANGED)
ir_img = cv2.imread(os.path.join(path_ir, img_info["image_name"]))
depth_img_resized = cv2.resize(depth_img, (image_cols, image_rows), interpolation=cv2.INTER_NEAREST)
ir_img_resized = cv2.resize(ir_img, (image_cols, image_rows), interpolation=cv2.INTER_NEAREST)
# ir_img_resized_small = cv2.resize(ir_img, (image_cols_map, image_rows_map), interpolation=cv2.INTER_NEAREST)
depth_imgs[i,:,:,0] = depth_img_resized
depth_imgs[i,:,:,1] = depth_img_resized
depth_imgs[i,:,:,2] = depth_img_resized
depth_imgs[i+1,:,:,0] = cv2.flip(depth_img_resized,1)
depth_imgs[i+1,:,:,1] = cv2.flip(depth_img_resized,1)
depth_imgs[i+1,:,:,2] = cv2.flip(depth_img_resized,1)
ir_imgs[i] = ir_img_resized
ir_imgs[i+1] = cv2.flip(ir_img_resized, 1)
center_map = np.zeros((int(img_info["img_height"]), int(img_info["img_width"])))
center_map[img_info["objpos"][0]][img_info["objpos"][1]] = 1
center_map_resized = cv2.resize(center_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
center_map_resized_fliped = cv2.flip(center_map_resized, 1)
centers[i] = np.unravel_index(np.argmax(center_map_resized), [center_map_resized.shape[0], center_map_resized.shape[1]])
centers[i+1] = np.unravel_index(np.argmax(center_map_resized_fliped), [center_map_resized_fliped.shape[0], center_map_resized_fliped.shape[1]])
for x in range(0,14):
score_map = np.zeros((int(img_info["img_height"]), int(img_info["img_width"])))
score_map[img_info["joints"][x][0]][img_info["joints"][x][1]] = 1
score_map_resized = cv2.resize(score_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
score_map_resized_fliped = cv2.flip(score_map_resized, 1)
annotations[i][x] = np.unravel_index(np.argmax(score_map_resized), [score_map_resized.shape[0], score_map_resized.shape[1]])
annotations[i + 1][flip_map[x]] = np.unravel_index(np.argmax(score_map_resized_fliped), [score_map_resized_fliped.shape[0], score_map_resized_fliped.shape[1]])
# for x in range(0,14):
# score_map = np.zeros((image_rows, image_cols))
# score_map[annotations[i][x][0]][annotations[i][x][1]] = 1
# score_map1 = np.zeros((image_rows, image_cols))
# score_map1[annotations[i+1][x][0]][annotations[i+1][x][1]] = 1
# cv2.imshow('show',score_map)
# cv2.imshow('show2', score_map1)
# cv2.waitKey(1000)
if i % 100 == 0:
print('Done: {0}/{1} train original images'.format(i, total_imgs))
i += 2
print('Loading done.')
np.save('./dataset/train_autoencoder_depth.npy', depth_imgs)
np.save('./dataset/train_autoencoder_ir.npy', ir_imgs)
np.save('./dataset/train_autoencoder_center.npy', centers)
np.save('./dataset/train_autoencoder_annotation.npy', annotations)
print('Saving done.')
def create_test_data():
print('Creating test images...')
print('-' * 30)
i = 0
path_depth = os.path.join(test_path, 'depth_vis')
path_ir = os.path.join(test_path, 'ir')
test_depth = os.listdir(path_depth)
total_imgs = len(test_depth) * 2
depth_imgs = np.ndarray((total_imgs, image_rows, image_cols, 3), dtype=np.uint8)
ir_imgs = np.ndarray((total_imgs, image_rows, image_cols, 3), dtype=np.uint8)
centers = np.ndarray((total_imgs, 2), dtype=np.int16)
annotations = np.ndarray((total_imgs, 14, 2), dtype=np.int16)
for img_info in dict:
if (img_info["patient"] == "7"):
depth_img = cv2.imread(os.path.join(path_depth, img_info["image_name"]), cv2.IMREAD_UNCHANGED)
ir_img = cv2.imread(os.path.join(path_ir, img_info["image_name"]))
depth_img_resized = cv2.resize(depth_img, (image_cols, image_rows), interpolation=cv2.INTER_NEAREST)
ir_img_resized = cv2.resize(ir_img, (image_cols, image_rows), interpolation=cv2.INTER_NEAREST)
depth_img_resized = np.asarray(depth_img_resized)
depth_imgs[i,:,:,0] = depth_img_resized
depth_imgs[i,:,:,1] = depth_img_resized
depth_imgs[i,:,:,2] = depth_img_resized
depth_imgs[i+1,:,:,0] = cv2.flip(depth_img_resized, 1)
depth_imgs[i+1,:,:,1] = cv2.flip(depth_img_resized, 1)
depth_imgs[i+1,:,:,2] = cv2.flip(depth_img_resized, 1)
ir_imgs[i] = ir_img_resized
ir_imgs[i+1] = cv2.flip(ir_img_resized, 1)
center_map = np.zeros((int(img_info["img_height"]), int(img_info["img_width"])))
center_map[img_info["objpos"][0]][img_info["objpos"][1]] = 1
center_map_resized = cv2.resize(center_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
center_map_resized_fliped = cv2.flip(center_map_resized, 1)
centers[i] = np.unravel_index(np.argmax(center_map_resized),
[center_map_resized.shape[0], center_map_resized.shape[1]])
centers[i + 1] = np.unravel_index(np.argmax(center_map_resized_fliped),
[center_map_resized_fliped.shape[0], center_map_resized_fliped.shape[1]])
for x in range(0, 14):
score_map = np.zeros((int(img_info["img_height"]), int(img_info["img_width"])))
score_map[img_info["joints"][x][0]][img_info["joints"][x][1]] = 1
score_map_resized = cv2.resize(score_map, (image_cols, image_rows), interpolation=cv2.INTER_CUBIC)
score_map_resized_fliped = cv2.flip(score_map_resized, 1)
annotations[i][x] = np.unravel_index(np.argmax(score_map_resized),
[score_map_resized.shape[0], score_map_resized.shape[1]])
annotations[i + 1][flip_map[x]] = np.unravel_index(np.argmax(score_map_resized_fliped),
[score_map_resized_fliped.shape[0], score_map_resized_fliped.shape[1]])
if i % 100 == 0:
print('Done: {0}/{1} test original images'.format(i, total_imgs))
i += 2
print(depth_imgs.shape)
print('Loading done.')
np.save('./dataset/test_autoencoder_depth.npy', depth_imgs)
np.save('./dataset/test_autoencoder_ir.npy', ir_imgs)
np.save('./dataset/test_autoencoder_center.npy', centers)
np.save('./dataset/test_autoencoder_annotation.npy', annotations)
print('Saving done.')
if __name__ == '__main__':
# create_train_data()
create_test_data()
# check()
| lyuboshen/Pose-Estimation-on-Depth-Images-of-Clinical-Patients-V2.0 | src/load_data.py | Python | mit | 9,257 | 0.013179 |
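# extract_cache_dicts pulls the values stored under key_list out of each
# per-system cache dict (one dict per output file), or returns a list of Nones
# when no caches were supplied; return_cache_dict rebuilds such a dict from
# single-system value lists.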
def extract_cache_dicts(cache_dicts, key_list, num_out):
if cache_dicts is not None:
if len(cache_dicts) != num_out:
raise ValueError(f'Length of cache_dicts should be equal to the number of output files!')
if len(key_list) == 1:
return [c[key_list[0]] for c in cache_dicts]
return zip(*[[c[k] for k in key_list] for c in cache_dicts])
return [None]*len(key_list)
def return_cache_dict(key_list, value_list):
for v in value_list:
if len(v) != 1:
raise ValueError(f'Only support caching for one system at a time!')
cache_dict = {k:v[0] for (k, v) in zip(key_list, value_list)}
return cache_dict
| neulab/compare-mt | compare_mt/cache_utils.py | Python | bsd-3-clause | 642 | 0.018692 |
""" A series of mocks for metapipe. """
from metapipe.models import Job
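# MockJob stands in for a real Job: submit() and is_running() each advance an
# internal step counter, so the job looks "running" while 1 < step < 10 and
# "complete" once the counter passes 10.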
class MockJob(Job):
def __init__(self, alias, command, depends_on=[]):
super(MockJob, self).__init__(alias, command, depends_on)
self._submitted = False
self._done = False
self._step = 0
def __repr__(self):
return '<MockJob: {}>'.format(self.alias)
def submit(self):
self._step += 1
def is_running(self):
self._step += 1
return self._step > 1 and self._step < 10
def is_queued(self):
return False
def is_complete(self):
return self._step > 10
def is_fail(self):
return False
| Sonictherocketman/metapipe | test/mocks.py | Python | mit | 673 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-20 21:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django_api.models
class Migration(migrations.Migration):
dependencies = [
('django_api', '0018_auto_20160420_2316'),
]
operations = [
migrations.CreateModel(
name='AreaImage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to=django_api.models._image_file_path, verbose_name='bild')),
('area', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='image', to='django_api.Area')),
],
options={
'verbose_name': 'områdes bild',
'verbose_name_plural': 'områdes bild',
},
),
migrations.CreateModel(
name='RockFaceImage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to=django_api.models._image_file_path, verbose_name='bild')),
('name', models.CharField(max_length=255, null=True, verbose_name='namn')),
('description', models.TextField(blank=True, null=True, verbose_name='kort beskrivning av bilden')),
('rockface', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='image', to='django_api.RockFace')),
],
options={
'verbose_name': 'bild på klippan',
'verbose_name_plural': 'bilder på klippan',
},
),
migrations.AddField(
model_name='route',
name='image',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='django_api.RockFaceImage', verbose_name='bild'),
),
]
| IsacEkberg/crag-finder | django_api/migrations/0019_auto_20160420_2323.py | Python | gpl-3.0 | 2,068 | 0.00436 |
"""
Copyright (c) 2008, Carl J Meyer
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The names of its contributors may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Time-stamp: <2008-11-21 01:54:45 carljm forms.py>
"""
from copy import deepcopy
from django import forms
from django.forms.util import flatatt
from django.utils.safestring import mark_safe
class Fieldset(object):
"""
An iterable Fieldset with a legend and a set of BoundFields.
"""
def __init__(self, form, name, boundfields, legend=None, description=''):
self.form = form
self.boundfields = boundfields
if legend is None: legend = name
self.legend = mark_safe(legend)
self.description = mark_safe(description)
self.name = name
def __iter__(self):
for bf in self.boundfields:
yield _mark_row_attrs(bf, self.form)
def __repr__(self):
return "%s('%s', %s, legend='%s', description='%s')" % (
self.__class__.__name__, self.name,
[f.name for f in self.boundfields], self.legend, self.description)
class FieldsetCollection(object):
def __init__(self, form, fieldsets):
self.form = form
self.fieldsets = fieldsets
def __len__(self):
return len(self.fieldsets) or 1
def __iter__(self):
if not self.fieldsets:
self.fieldsets = (('main', {'fields': self.form.fields.keys(),
'legend': ''}),)
for name, options in self.fieldsets:
try:
field_names = [n for n in options['fields']
if n in self.form.fields]
except KeyError:
raise ValueError("Fieldset definition must include 'fields' option." )
boundfields = [forms.forms.BoundField(self.form, self.form.fields[n], n)
for n in field_names]
yield Fieldset(self.form, name, boundfields,
options.get('legend', None),
options.get('description', ''))
def _get_meta_attr(attrs, attr, default):
try:
ret = getattr(attrs['Meta'], attr)
except (KeyError, AttributeError):
ret = default
return ret
def get_fieldsets(bases, attrs):
"""
Get the fieldsets definition from the inner Meta class, mapping it
on top of the fieldsets from any base classes.
"""
fieldsets = _get_meta_attr(attrs, 'fieldsets', ())
new_fieldsets = {}
order = []
for base in bases:
for fs in getattr(base, 'base_fieldsets', ()):
new_fieldsets[fs[0]] = fs
order.append(fs[0])
for fs in fieldsets:
new_fieldsets[fs[0]] = fs
if fs[0] not in order:
order.append(fs[0])
return [new_fieldsets[name] for name in order]
def get_row_attrs(bases, attrs):
"""
Get the row_attrs definition from the inner Meta class.
"""
return _get_meta_attr(attrs, 'row_attrs', {})
def _mark_row_attrs(bf, form):
row_attrs = deepcopy(form._row_attrs.get(bf.name, {}))
if bf.field.required:
req_class = 'required'
else:
req_class = 'optional'
if 'class' in row_attrs:
row_attrs['class'] = row_attrs['class'] + ' ' + req_class
else:
row_attrs['class'] = req_class
bf.row_attrs = mark_safe(flatatt(row_attrs))
return bf
class BetterFormBaseMetaclass(type):
def __new__(cls, name, bases, attrs):
attrs['base_fieldsets'] = get_fieldsets(bases, attrs)
attrs['base_row_attrs'] = get_row_attrs(bases, attrs)
new_class = super(BetterFormBaseMetaclass,
cls).__new__(cls, name, bases, attrs)
return new_class
class BetterFormMetaclass(BetterFormBaseMetaclass,
forms.forms.DeclarativeFieldsMetaclass):
pass
class BetterModelFormMetaclass(BetterFormBaseMetaclass,
forms.models.ModelFormMetaclass):
pass
class BetterBaseForm(object):
"""
``BetterForm`` and ``BetterModelForm`` are subclasses of Form
and ModelForm that allow for declarative definition of fieldsets
and row_attrs in an inner Meta class.
The row_attrs declaration is a dictionary mapping field names to
dictionaries of attribute/value pairs. The attribute/value
dictionaries will be flattened into HTML-style attribute/values
(i.e. {'style': 'display: none'} will become ``style="display:
none"``), and will be available as the ``row_attrs`` attribute of
the ``BoundField``. Also, a CSS class of "required" or "optional"
will automatically be added to the row_attrs of each
``BoundField``, depending on whether the field is required.
The fieldsets declaration is a list of two-tuples very similar to
the ``fieldsets`` option on a ModelAdmin class in
``django.contrib.admin``.
The first item in each two-tuple is a name for the fieldset (must
be unique, so that overriding fieldsets of superclasses works),
and the second is a dictionary of fieldset options
Valid fieldset options in the dictionary include:
``fields`` (required): A tuple of field names to display in this
fieldset.
``classes``: A list of extra CSS classes to apply to the fieldset.
``legend``: This value, if present, will be the contents of a
``legend`` tag to open the fieldset. If not present the unique
name of the fieldset will be used (so a value of '' for legend
must be used if no legend is desired.)
``description``: A string of optional extra text to be displayed
under the ``legend`` of the fieldset.
When iterated over, the ``fieldsets`` attribute of a
``BetterForm`` (or ``BetterModelForm``) yields ``Fieldset``s.
Each ``Fieldset`` has a name attribute, a legend attribute, and a
description attribute, and when iterated over yields its
``BoundField``s.
For backwards compatibility, a ``BetterForm`` or
``BetterModelForm`` can still be iterated over directly to yield
all of its ``BoundField``s, regardless of fieldsets.
For more detailed examples, see the doctests in tests/__init__.py.
"""
def __init__(self, *args, **kwargs):
self._fieldsets = deepcopy(self.base_fieldsets)
self._row_attrs = deepcopy(self.base_row_attrs)
super(BetterBaseForm, self).__init__(*args, **kwargs)
@property
def fieldsets(self):
return FieldsetCollection(self, self._fieldsets)
def __iter__(self):
for bf in super(BetterBaseForm, self).__iter__():
yield _mark_row_attrs(bf, self)
class BetterForm(BetterBaseForm, forms.Form):
__metaclass__ = BetterFormMetaclass
__doc__ = BetterBaseForm.__doc__
class BetterModelForm(BetterBaseForm, forms.ModelForm):
__metaclass__ = BetterModelFormMetaclass
__doc__ = BetterBaseForm.__doc__
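# Illustrative sketch (not part of the original module): declaring fieldsets
# and row_attrs through the inner Meta class, as described in the
# BetterBaseForm docstring above.  The form and field names are made up.
#
#   class ContactForm(BetterForm):
#       name = forms.CharField()
#       email = forms.EmailField()
#       message = forms.CharField(required=False, widget=forms.Textarea)
#
#       class Meta:
#           fieldsets = (('main', {'fields': ('name', 'email'),
#                                  'legend': 'Contact details'}),
#                        ('extra', {'fields': ('message',),
#                                   'description': 'Anything else?'}))
#           row_attrs = {'message': {'style': 'width: 100%'}}
#
#   Iterating over ContactForm().fieldsets yields one Fieldset per entry, and
#   each BoundField carries row_attrs plus an automatic required/optional class.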
| lanky/fabrik | fabrik/snippets/betterforms.py | Python | gpl-2.0 | 8,496 | 0.005532 |
#!/usr/bin/env python3
import dns.resolver
import dns.rdatatype
# This shouldn't be necessary, but for some reason __import__, when
# called from a coroutine, doesn't always work, and I haven't been
# able to figure out why. Possibly this is a 3.4.0 bug that's fixed
# later, but googling for it hasn't worked.
import dns.rdtypes.ANY.MX
import dns.rdtypes.IN.A
import dns.rdtypes.IN.AAAA
import dns.rdtypes.ANY.SOA
import dns.rdtypes.ANY.NS
import smtp
import ssl as tls
import asyncio
import sys
import pdb
import os
import pwd
import socket
import base64
import hashlib
import time
import mailbox
import email
import email.parser
import email.utils
import email.header
import syslog
from concurrent.futures import FIRST_COMPLETED;
mindhome = "/etc/minder"
class coldb:
def parsefile(self, filename):
cf = open(filename, "r")
for line in cf:
line = line.rstrip()
fields = line.split(":")
self.process_fields(fields)
cf.close()
class tlsconf(coldb):
tlsctx = None
cert = None
key = None
name = None
def __init__(self, conffile=(mindhome + "/tls.conf")):
self.parsefile(conffile)
# TLS Context for incoming TLS connections:
# XXX this should be in a separate process!
# It may seem a bit contrary to practice that which ciphers and
# protocols are supported is hardcoded. The reason for this is
# that the end-user doesn't know from ciphers and protocols, and
# so we choose as secure a selection as we can.
#
# This is arguably problematic, because we might prefer crappy
# security to no security for TLS delivery, but we demand good
# security for maildrops, and have no way to distinguish whether
# this is a maildrop or a transfer until _after_ the TLS
# connection is established.
#
# Once STARTTLS support is implemented, we could allow
# maildrops only on the TLS port (465), and reject maildrops on
# the main port (25) and the STARTTLS port (587).
self.tlsctx = tls.SSLContext(tls.PROTOCOL_SSLv23)
self.tlsctx.options = (tls.OP_NO_COMPRESSION | tls.OP_SINGLE_DH_USE |
tls.OP_SINGLE_ECDH_USE |
tls.OP_NO_SSLv2 | tls.OP_NO_SSLv3)
self.tlsctx.verify_mode = tls.CERT_NONE # we don't want client certs
self.tlsctx.set_ciphers("ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" +
"ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:" +
"RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5")
self.tlsctx.load_cert_chain(self.cert, self.key)
def process_fields(self, fields):
if fields[0] == "name":
self.name = fields[1]
count = 2
elif fields[0] == "cert":
self.cert = fields[1]
count = 2
elif fields[0] == "key":
self.key = fields[1]
count = 2
else:
raise Exception("Unknown TLS setting: ", ":".join(fields))
if len(fields) > count:
raise Exception("Too many fields: ", ":".join(fields))
class userdb(coldb):
users = {}
domains = []
wildcard_domains = []
def __init__(self):
self.parsefile(mindhome + "/userdb")
def authenticate(self, username, password):
# Treat the username and password as if they are UTF-8.
# Encoding is not well specified here, so this could cause
# interop problems.
address = self.parse_address(str(username, encoding="utf-8"))
if address == None:
return None
if address[0] not in self.users:
return None
# Find the user entry for the given domain. If a user's
# domain is expressed as a wildcard, we prepend "*." to the
# domain we parsed out of the authentication data to find it,
# since it would be bogus to try to explain to the user why
# their username is jruser@*.example.com.
udmap = self.users[address[0]]
if address[1] not in udmap:
if "*." + address[1] in udmap:
udata = udmap["*." + address[1]]
else:
return None
else:
udata = udmap[address[1]]
hash = base64.standard_b64decode(udata["pass"])
salt = hash[32:]
sha = hashlib.sha256()
sha.update(password)
sha.update(salt)
chash = sha.digest()
# We return the mailbox so that we can use it to validate
# outgoing addresses later--any incoming address that winds
# up in the mailbox of the user who validated is a valid
# outgoing email address for that user.
if chash == hash[:32]:
return udata
return None
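# The stored "pass" field is base64(sha256(password + salt) + salt): the first
# 32 bytes of the decoded value are the digest, the rest is the salt.  A sketch
# for generating a compatible entry (variable names are illustrative):
#
#   salt = os.urandom(16)
#   digest = hashlib.sha256(password + salt).digest()
#   entry = base64.standard_b64encode(digest + salt)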
def process_fields(self, fields):
# user:mailbox:password:domains...
if len(fields) < 4:
raise Exception("invalid user database entry: %s" % line)
user = fields[0]
mbox = fields[1]
passw = fields[2]
udomains = fields[3:]
if user in self.users:
udmap = self.users[user]
else:
udmap = {}
for domain in udomains:
udmap[domain] = {'mbox': mbox, 'pass': passw}
if domain[0] == '*' and domain[1] == '.':
if domain not in self.wildcard_domains:
self.wildcard_domains.append(domain)
elif domain not in self.domains:
self.domains.append(domain)
self.users[user] = udmap
def parse_address(self, address):
# Python's parseaddr function doesn't actually do the right thing
# here, so for now this is going to be a very manual process,
# more's the pity.
# XXX does this work with unicode?
parts = address.lower().split("@")
if len(parts) != 2:
return None
user = parts[0]
domain = parts[1]
return [user, domain]
def find_wildcard(self, subdomain, domains):
splode = subdomain.split(".")
for i in range(0, len(splode)):
wildsub = "*." + ".".join(splode[i:])
if wildsub in domains:
return wildsub
return None
def validate_domain(self, address):
# assume address is output of parse_address
domain = address[1]
if domain not in self.domains:
wildcard = self.find_wildcard(domain, self.wildcard_domains)
if wildcard != None:
return True
return False
else:
return True
def find_slot(self, address):
user = address[0]
domain = address[1]
if user not in self.users:
return None
udomains = self.users[user]
for udomain in udomains:
if domain == udomain:
return udomains[udomain]
wildcard = self.find_wildcard(domain, udomains)
if wildcard != None:
return udomains[wildcard]
return None
def validate_address(self, address):
slot = self.find_slot(address)
if slot == None:
return False
return True
class msmtp(smtp.server):
userdb = None
mailbox = None
connections = {}
connection_list = []
message = None
# If we are authenticated, make sure the mail is from
# the authenticated user; if not, make sure that the
# sender passes basic anti-spam checks.
def validate_mailfrom(self, address):
if self.authenticated:
return self.validate_fromuser(address)
else:
return self.validate_sender(address)
def validate_sender(self, address):
# Add sender validation fu here:
return True
@asyncio.coroutine
def validate_fromuser(self, address):
addr = self.userdb.parse_address(address)
# First just check that it's a valid local address
if not self.validate_mailbox(addr):
print("not a local address: ", repr(addr))
return False
# Now check to see if the address delivers to the
# specified mailbox, which should be the mailbox
# of the authenticated user.
slot = self.userdb.find_slot(addr)
if (self.mailbox != None and self.authenticated and
slot["mbox"] == self.mailbox):
self.mail_from = address
self.from_domain = addr[1]
return True
self.push("550 Not authorized.")
return False
def validate_rcptto(self, address):
print("validate_rcptto:", address)
udbaddr = self.userdb.parse_address(address)
if udbaddr == None:
self.push("501 Syntax: RCPT TO: <address>")
syslog.syslog(syslog.LOG_INFO, "501 Syntax: RCPT TO: %s" % address)
return False
if self.authenticated:
print("validate_recipient")
return self.validate_recipient(udbaddr[0], udbaddr[1])
else:
print("validate mailbox")
return self.validate_mailbox(udbaddr)
# Do the A and AAAA queries in parallel.
@asyncio.coroutine
def fetch_addrs(self, resolver, name, arecs, a4recs):
aco = resolver.aquery(name, "A", raise_on_no_answer=False)
a4co = resolver.aquery(name, "AAAA", raise_on_no_answer=False)
co = asyncio.gather(aco, a4co)
(aans, a4ans) = yield from co
if aans.rrset != None:
for rdata in aans:
arecs.append(rdata.address)
if a4ans.rrset != None:
for rdata in a4ans:
a4recs.append(rdata.address)
# Do all the MX fiddling to get a connection to the specified domain
# if we don't already have one.
@asyncio.coroutine
def get_connection(self, user, domain):
# We're already connected. Just return the connection.
if domain in self.connections:
connection = self.connections[domain]
if self.connections[domain] not in self.connection_list:
status = yield from self.send_rcptto(connection, user + "@" + domain)
if status:
self.connections[user + "@" + domain] = connection
self.connection_list.append(connection)
else:
print("bad status after send_rcptto.")
return status
return True
resolver = dns.resolver.Resolver()
resolver.use_edns(0, 0, 1410)
mxs = {}
answer = None
addressable = False
try:
answer = yield from resolver.aquery(domain, "MX")
except dns.resolver.NoAnswer:
# No answer means there's no MX record, so look for an A or
# AAAA record.
arecs = []
a4recs = []
yield from self.fetch_addrs(resolver, domain, arecs, a4recs)
if len(arecs) > 0 or len(a4recs) > 0:
mxs = { 0: [ { "exchange" : domain,
"a": arecs, "aaaa": a4recs } ] }
addressable = True
except dns.resolver.NXDOMAIN:
self.push("550 no such domain.")
syslog.syslog(syslog.LOG_INFO, "550 no such domain: %s" % domain)
print("550 no such domain: %s" % domain)
return False
except:
# Temporary failure; we just have to stash the message for this
# address.
self.connections[user + "@" + domain] = None
self.connections[domain] = None
return True
else:
for mx in answer:
if mx.rdtype == dns.rdatatype.MX:
arecs = []
a4recs = []
# If exchange addresses were included in the additional
# section, use those.
for rrset in answer.response.additional:
if rrset.name == mx.exchange:
if rrset.rdtype == dns.rdatatype.A:
for rdata in rrset:
arecs.append(rdata.address)
elif rrset.rdtype == dns.rdatatype.AAAA:
for rdata in rrset:
a4recs.append(rdata.address)
# Otherwise, fetch A and/or AAAA records for exchange
if len(arecs) == 0 and len(a4recs) == 0:
yield from self.fetch_addrs(resolver, mx.exchange, arecs, a4recs)
if len(arecs) > 0 or len(a4recs) > 0:
entry = { "exchange": mx.exchange,
"a": arecs, "aaaa": a4recs}
if mx.preference in mxs:
mxs[mx.preference].append(entry)
else:
mxs[mx.preference] = [entry]
addressable = True
# If we didn't get a single server IP address either out of the
# MX query chain or the A/AAAA query on the name if there was no
# MX, then we can't deliver to this address.
if not addressable:
self.push("550 no exchanger or addresses for domain.")
syslog.syslog(syslog.LOG_INFO,
"550 no exchanger or addresses for: %s" % domain)
print("550 no exchanger or addresses for: %s" % domain)
return False
# Our task now is to get a connection to the most preferable
# Mail Exchanger (MX) we can reach.
# Make a list of all the addresses to try, in order of preference.
# We prefer IPv6 for the first attempt, but interleave IPv6 and
# IPv4 addresses in case one transport is working and the other
# is not. The interleaving is per-exchange, so we always try
# exchanges in order of preference and, among exchanges with the
# same preference, one exchange at a time.
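# For example, with MX 10 -> mx1 (one AAAA and one A record) and MX 20 -> mx2
# (one A record), addrs would come out roughly as
# [(mx1_v6, AF_INET6, mx1), (mx1_v4, AF_INET, mx1), (mx2_v4, AF_INET, mx2)].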
addrs = []
preferences = list(mxs.keys())
preferences.sort()
# Iterate across preference levels
for pref in preferences:
exchanges = mxs[pref]
# Iterate across exchanges at a given preference level
for exchange in exchanges:
arecs = exchange['a']
qrecs = exchange['aaaa']
name = exchange['exchange']
# Interleave the IPv6 and IPv4 addresses for this exchange.
lim = max(len(arecs), len(qrecs))
for i in range(0, lim):
if i < len(qrecs):
addrs.append((qrecs[i], socket.AF_INET6, name))
if i < len(arecs):
addrs.append((arecs[i], socket.AF_INET, name))
# Time is of the essence here, because the mail user agent is
# waiting, and we want to give the user quick feedback, but we
# also want to follow the rules and not boost our spam score
# by delivering to a low-preference MX, so we allow about five
# seconds to complete a connection rather than the usual 90
# seconds. We start connecting every five seconds, and take
# the first connection that completes, dropping the others.
# It should be rare that a connection takes longer than five
# seconds to complete if the exchange is reachable.
connection = yield from self.connect_to_addresses(addrs, 5)
if connection != None:
status = yield from self.send_rcptto(connection, user + "@" + domain)
if status:
self.connections[user + "@" + domain] = connection
self.connections[domain] = connection
self.connection_list.append(connection)
else:
print("horked in send_rcptto")
return status
print("no connection returned.")
return False
@asyncio.coroutine
def send_rcptto(self, connection, mailbox):
# Identify the sender of the current transaction.
try:
yield from connection.mail_from(self.mail_from)
print("sent rcpt_to")
except Exception as x:
self.connections[mailbox] = x
self.connections[mailbox.split("@", 1)[1]] = x
self.push_exception_result(x)
print("connection.mail_from borked:", str(x))
return False
return True
@asyncio.coroutine
def connect_to_addresses(self, addresses, interval):
tasks = []
client_futs = []
greet_futs = []
connection = None
@asyncio.coroutine
def process_completions(timeout):
connection = None
while connection == None and (len(tasks) > 0 or
len(client_futs) > 0 or
len(greet_futs) > 0):
# Figure out how much time to wait, wait at least a
# bit.
remaining = timeout - time.time()
if remaining < 0:
remaining = 0.1
alltasks = tasks.copy()
alltasks.extend(client_futs)
alltasks.extend(greet_futs)
co2 = asyncio.wait(alltasks,
timeout=interval, return_when=FIRST_COMPLETED)
# Wait up to _interval_ seconds for this task or any task created in a
# previous iteration to complete.
(complete, pending) = yield from co2
# if any tasks completed, try to establish a conversation on the
# corresponding socket.
for task in complete:
# If the future was cancelled, it was by something at a higher
# level, so we should just stop.
if task.cancelled():
return None
# If we didn't get an exception, then we should have a connected
# socket.
if task.exception() == None:
if task in tasks:
(transport, client) = task.result()
fut = client.is_ready()
if fut == None: # unlikely
fut = client.hello(self.from_domain)
greet_futs.append(fut)
else:
client_futs.append(fut)
elif task in client_futs:
client = task.result()
fut = client.hello(self.from_domain)
if fut == None: # really unlikely
connection = client
else:
greet_futs.append(fut)
elif task in greet_futs:
connection = task.result()
else:
print("Weird: %s completed but not in %s or %s" %
(task, tasks, client_futs))
if task in tasks:
tasks.remove(task)
elif task in client_futs:
client_futs.remove(task)
else:
greet_futs.remove(task)
if connection != None:
break
if timeout <= time.time():
break
return connection
# Loop through the addresses, starting a connection to the next
# one every _interval_ seconds. When we have a connection,
# wait for it to become ready.
for (address, family, name) in addresses:
print("Connecting to", name, "at", address)
loop = asyncio.get_event_loop()
co = loop.create_connection(smtp.client,
host=address, port=25, family=family)
task = asyncio.async(co)
tasks.append(task)
connection = yield from process_completions(time.time() + interval)
if connection:
break
# At this point if we don't have a connection, but still have pending
# tasks, wait up to an additional _interval_ seconds for one of them to
# cough up a connection.
if connection == None:
connection = yield from process_completions(time.time() + interval)
for task in tasks:
task.cancel()
for task in client_futs:
task.cancel()
for task in greet_futs:
task.cancel()
# Still nothing. Too bad.
if connection == None:
return None
if connection != None:
print("Connected to:", repr(connection.peer))
return connection
# In validate_recipient, we actually try to connect to the mail
# server for the specified recipient. If more than one recipient
# for a message is on the same server (as identified by the
# domain) we use the same connection to validate both recipients.
# When we have validated all the recipients and have a message to
# deliver, we write it to a special mailbox, and then put a link
# in the mailbox for each recipient. Then we try to deliver, and
# on each acknowledged delivery we erase that recipient. If we
# deliver to all the recipients, we erase the stored message.
# If some recipients have transient errors, then we hold the
# message for later delivery to those recipients, but the goal is
# to return as much status information to the sender in realtime
# as possible. Store-and-forward may be necessary for some
# locales where network connectivity is poor, but should not
# be necessary in most cases.
@asyncio.coroutine
def validate_recipient(self, user, domain):
# If we get a False back from get_connection, it means that
# this mailbox will not accept mail from this sender.
if not (yield from self.get_connection(user, domain)):
return False
# Otherwise, see if there's a connection. There will either
# be a connection or None for this domain.
connection = self.connections[domain]
# None means that we weren't able to connect because of a
# temporary failure, which means we have to assume the address
# is valid and try to deliver it later, generating a bounce if
# worse comes to worst.
if connection == None:
#self.push("250 Ok.")
self.push("450 Mailbox temporarily inaccessible; try later.")
return False
try:
result = yield from connection.rcpt_to(user + "@" + domain)
except (smtp.PermanentFailure, smtp.TemporaryFailure) as x:
for line in x.response():
self.push(line)
return False
except Exception as x:
self.push("451 " + str(x))
return False
return True
def validate_mailbox(self, address):
if not self.userdb.validate_domain(address):
self.push('551 Not a local domain, relaying not available.')
syslog.syslog(syslog.LOG_INFO,
"551 Invalid domain: RCPT TO: %s@%s" % tuple(address))
return False
if not self.userdb.validate_address(address):
self.push("550 Mailbox unavailable.")
syslog.syslog(syslog.LOG_INFO,
"550 Invalid mailbox: RCPT TO: %s@%s" % tuple(address))
return False
return True
def data_mode(self):
# If we aren't acting as a maildrop server, just accept the message.
if not self.authenticated:
return False
co = self.start_data_tunnel()
asyncio.async(co)
# Returning true means we're responsible for sending 354 when
# we are ready.
return True
@asyncio.coroutine
def start_data_tunnel(self):
if len(self.connection_list) == 0:
self.push("451 not ready for some reason.")
return
waits = []
if len(self.connection_list) == 0:
self.push("451 no connections.")
for connection in self.connection_list:
print(repr(connection))
fut = connection.data()
waits.append(fut)
while len(waits) > 0:
(complete, waits) = yield from asyncio.wait(waits)
for task in complete:
x = task.exception()
if x != None:
self.push_exception_result(x)
return
self.chunk_state = None
self.line_oriented = False
self.push("354 On my mark, En-gage...")
return
def push_exception_result(self, x):
if (isinstance(x, smtp.TemporaryFailure) or
isinstance(x, smtp.PermanentFailure)):
for line in x.response():
self.push(line)
else:
self.push("451 kabplui!")
# When we are receiving data as a maildrop, we just receive it as chunks
# and send it to all of the connections without processing. We do, however,
# look for the \r\n.\r\n sequence so that we know when we are done.
# There is no guarantee that this will not be broken across two chunks,
# so this is harder than it might seem at first, although not _hard_.
def process_chunk(self, chunk):
resid = None
done = False
eom = b"\r\n.\r\n"
if self.message != None:
self.message = self.message + chunk
else:
self.message = chunk
self.eom_search_start = 0
# Just search the portion of the message we haven't already searched for
# the eom tag.
offset = chunk.find(eom, self.eom_search_start)
# If we didn't find the eom tag, see if there is text at the end of the
# message that could be part of the EOM; if so, set eom_search_start
# to the beginning of that text.
if offset == -1:
eom_offset = 0
for i in range(min(len(chunk), len(eom) - 1), 0, -1):
if chunk.endswith(eom[0:i]):
eom_offset = i
break
self.eom_search_start = self.eom_search_start + len(chunk) - eom_offset
else:
if offset + len(eom) != len(chunk):
resid = chunk[offset+len(eom):]
chunk = chunk[0:offset + len(eom)]
self.message = self.message + chunk
self.line_oriented = True
self.chunk_state = None
# Wait for data confirmations and then send the acknowledgement
co = self.await_data_confirmations()
asyncio.async(co)
for connection in self.connection_list:
connection.send_transparent_data(chunk)
return resid
@asyncio.coroutine
def await_data_confirmations(self):
futs = []
for connection in self.connection_list:
fut = connection.await_data_response(self.message)
if fut != None:
futs.append(fut)
while len(futs) > 0:
(done, futs) = yield from asyncio.wait(futs)
for fut in done:
x = fut.exception()
if x != None:
self.push_exception_result(x)
return
self.reset()
self.rcpttos = []
self.mailfrom = None
self.smtp_state = self.COMMAND
self.num_data_bytes = 0
self.received_lines = []
self.message = None
self.push("250 Message Accepted.")
def process_message(self, peer, mailfrom, rcpttos, data):
syslog.syslog(syslog.LOG_INFO, "Mail from %s via %s" % (mailfrom, peer[0]))
boxes = {}
self.debugstream.flush()
rcvd = "Received: from %s; %s\r\n" % (peer[0], email.utils.formatdate())
#parser = email.parser.Parser()
#message = None
#try:
# message = parser.parsestr(rcvd + data)
#except Exception as e:
# syslog.syslog(syslog.LOG_INFO, "Malformed message: %s", str(e))
#if message != None:
for rcpt in rcpttos:
syslog.syslog(syslog.LOG_INFO, "Delivering to %s" % rcpt)
address = self.userdb.parse_address(rcpt)
if address == None:
raise Exception("Validated address fails to parse: %s\n" % rcpt)
slot = self.userdb.find_slot(address)
if slot == None:
raise Exception("Validated address has no slot: %s\n" % rcpt)
if "mbox" not in slot:
raise Exception("No mailbox for address %s\n" % rcpt)
maildir = mailbox.Maildir("/mailboxes/" + slot["mbox"], create=True)
try:
maildir.add(rcvd + data)
except Exception as e:
syslog.syslog(syslog.LOG_INFO, "Malformed message: %s" % str(e))
return "501 Malformed message"
return False
def authenticate(self, username, password):
slot = self.userdb.authenticate(username, password)
if slot == None:
return False
self.mailbox = slot['mbox']
return True
def reset(self):
connections = self.connections
self.connections = {}
self.connection_list = []
for domain in connections:
if "@" not in domain:
connections[domain].shutdown()
def closed(self):
self.reset()
# Open debugging and logging while we still can.
debug = open("/var/log/smtpd.debug", "a")
syslog.openlog(facility=syslog.LOG_MAIL)
syslog.syslog(syslog.LOG_INFO, "initial message")
# Open the user database.
msmtp.userdb = userdb()
msmtp.debugstream = debug
minder_uent = pwd.getpwnam("minder")
minder_user = minder_uent.pw_uid
# Load the TLS certs...
tlscf = tlsconf()
tlsctx = tlscf.tlsctx
# Get the event loop...
loop = asyncio.get_event_loop()
# Create a listener...
maildrop_ports = []
maildrop_ports.append(loop.create_server(msmtp, "::", 465,
family=socket.AF_INET6,
ssl=tlsctx, backlog=5, reuse_address=True))
maildrop_ports.append(loop.create_server(msmtp, "0.0.0.0", 465,
family=socket.AF_INET,
ssl=tlsctx, backlog=5, reuse_address=True))
servers = asyncio.gather(*maildrop_ports)
maildrop_servers = loop.run_until_complete(servers)
# XXX fork, close listen socket, chroot, setuid
os.chroot(mindhome)
os.setuid(minder_user)
syslog.syslog(syslog.LOG_INFO, "listening on :: port 25")
try:
loop.run_forever()
except KeyboardInterrupt:
pass
# Close the server
for server in maildrop_servers:
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()
| Abhayakara/minder | smtpd/smtpd.py | Python | gpl-3.0 | 27,553 | 0.013283 |
import sys
import os
import re
from subprocess import PIPE, Popen, call
fq1, fq2, db, prefix = sys.argv[1:]
bowtie2_logfh = open(prefix+'.bowtie2.log','w')
bamfile = prefix+'.bam'
bowtie2_cmd = ['bowtie2', '-x', db, '-1', fq1, '-2', fq2]
samtools_view = ['samtools', 'view', '-bhS', '-']
samtools_sort = ['samtools', 'sort', '-', prefix]
samtools_index = ['samtools', 'index', bamfile]
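# Pipeline: bowtie2 streams SAM to stdout -> samtools view converts it to BAM ->
# samtools sort writes <prefix>.bam (legacy sort prefix argument); the sorted
# BAM is indexed below.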
p1 = Popen(bowtie2_cmd, stdout = PIPE, stderr = bowtie2_logfh)
p2 = Popen(samtools_view, stdin = p1.stdout, stdout = PIPE, stderr = bowtie2_logfh)
p3 = Popen(samtools_sort, stdin = p2.stdout, stdout = PIPE, stderr = bowtie2_logfh)
p1.stdout.close()
p2.stdout.close()
output, err = p3.communicate()
samtools_index = ['samtools', 'index', bamfile]
call(samtools_index, stderr = bowtie2_logfh, stdout = bowtie2_logfh)
bowtie2_logfh.close()
| luo-chengwei/utilitomics | utils/align.PE.py | Python | gpl-3.0 | 826 | 0.029056 |
import abjad
from abjad.tools import abctools
class TimespanSpecifier(abctools.AbjadValueObject):
### CLASS VARIABLES ###
__slots__ = (
'_forbid_fusing',
'_forbid_splitting',
'_minimum_duration',
)
### INITIALIZER ###
def __init__(
self,
forbid_fusing=None,
forbid_splitting=None,
minimum_duration=None,
):
if forbid_fusing is not None:
forbid_fusing = bool(forbid_fusing)
self._forbid_fusing = forbid_fusing
if forbid_splitting is not None:
forbid_splitting = bool(forbid_splitting)
self._forbid_splitting = forbid_splitting
if minimum_duration is not None:
minimum_duration = abjad.Duration(minimum_duration)
self._minimum_duration = minimum_duration
### PUBLIC PROPERTIES ###
@property
def forbid_fusing(self):
return self._forbid_fusing
@property
def forbid_splitting(self):
return self._forbid_splitting
@property
def minimum_duration(self):
return self._minimum_duration
| josiah-wolf-oberholtzer/consort | consort/tools/TimespanSpecifier.py | Python | mit | 1,111 | 0.0036 |
"""
KeepalivePlugin is a pretty cool guy. Eh reflects keep alive packets and doesnt
afraid of anything.
"""
from spockbot.plugins.base import PluginBase
class KeepalivePlugin(PluginBase):
requires = 'Net'
events = {
'PLAY<Keep Alive': 'handle_keep_alive',
}
# Keep Alive - Reflects data back to server
def handle_keep_alive(self, name, packet):
packet.new_ident('PLAY>Keep Alive')
self.net.push(packet)
| gamingrobot/SpockBot | spockbot/plugins/helpers/keepalive.py | Python | mit | 451 | 0 |
from distutils.core import setup, Extension
setup (name = 'krbV',
version = '1.0.90',
description = 'Kerberos V Bindings for Python',
long_description = """
python-krbV allows python programs to use Kerberos 5 authentication/security
""",
author = 'Test',
author_email = '[email protected]',
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU General Public License (LGPL)',
'Operating System :: POSIX :: Linux',
'Programming Language :: C',
'Topic :: System :: Systems Administration :: Authentication/Directory'
],
ext_modules = [Extension ('krbV',
[ 'krb5util.c', 'krb5module.c', 'krb5err.c' ],
libraries = ['krb5', 'com_err']
)
]
)
| ivecera/gentoo-overlay | dev-python/python-krbV/files/setup.py | Python | apache-2.0 | 714 | 0.068627 |
"""Auto-generated file, do not edit by hand. RU metadata"""
from phonenumbers.phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_RU = PhoneMetadata(id='RU', country_code=7, international_prefix='810',
general_desc=PhoneNumberDesc(national_number_pattern='[347-9]\\d{9}', possible_length=(10,)),
fixed_line=PhoneNumberDesc(national_number_pattern='[348]\\d{9}', example_number='3011234567', possible_length=(10,)),
mobile=PhoneNumberDesc(national_number_pattern='9\\d{9}', example_number='9123456789', possible_length=(10,)),
national_prefix='8',
national_prefix_for_parsing='8')
| daviddrysdale/python-phonenumbers | python/tests/testdata/region_RU.py | Python | apache-2.0 | 630 | 0.009524 |
import sys
import re
import numpy as np
from numpy.testing import assert_almost_equal, assert_array_equal
from sklearn.datasets import load_digits
from sklearn.externals.six.moves import cStringIO as StringIO
from sklearn.neural_network import BernoulliRBM
from sklearn.utils.validation import assert_all_finite
np.seterr(all='warn')
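# Scale the digit pixel values into [0, 1] so they can be treated as Bernoulli
# visible units.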
Xdigits = load_digits().data
Xdigits -= Xdigits.min()
Xdigits /= Xdigits.max()
def test_fit():
X = Xdigits.copy()
rbm = BernoulliRBM(n_components=64, learning_rate=0.1,
batch_size=10, n_iter=7, random_state=9)
rbm.fit(X)
assert_almost_equal(rbm.score_samples(X).mean(), -21., decimal=0)
# in-place tricks shouldn't have modified X
assert_array_equal(X, Xdigits)
def test_transform():
X = Xdigits[:100]
rbm1 = BernoulliRBM(n_components=16, batch_size=5,
n_iter=5, random_state=42)
rbm1.fit(X)
Xt1 = rbm1.transform(X)
Xt2 = rbm1._mean_hiddens(X)
assert_array_equal(Xt1, Xt2)
def test_sample_hiddens():
rng = np.random.RandomState(0)
X = Xdigits[:100]
rbm1 = BernoulliRBM(n_components=2, batch_size=5,
n_iter=5, random_state=42)
rbm1.fit(X)
h = rbm1._mean_hiddens(X[0])
hs = np.mean([rbm1._sample_hiddens(X[0], rng) for i in range(100)], 0)
assert_almost_equal(h, hs, decimal=1)
def test_fit_gibbs():
"""
Gibbs on the RBM hidden layer should be able to recreate [[0], [1]]
from the same input
"""
rng = np.random.RandomState(42)
X = np.array([[0.], [1.]])
rbm1 = BernoulliRBM(n_components=2, batch_size=2,
n_iter=42, random_state=rng)
# you need that many iterations
rbm1.fit(X)
assert_almost_equal(rbm1.components_,
np.array([[0.02649814], [0.02009084]]), decimal=4)
assert_almost_equal(rbm1.gibbs(X), X)
return rbm1
def test_fit_gibbs_sparse():
"""
Gibbs on the RBM hidden layer should be able to recreate [[0], [1]] from
the same input even when the input is sparse, and test against non-sparse
"""
rbm1 = test_fit_gibbs()
rng = np.random.RandomState(42)
from scipy.sparse import csc_matrix
X = csc_matrix([[0.], [1.]])
rbm2 = BernoulliRBM(n_components=2, batch_size=2,
n_iter=42, random_state=rng)
rbm2.fit(X)
assert_almost_equal(rbm2.components_,
np.array([[0.02649814], [0.02009084]]), decimal=4)
assert_almost_equal(rbm2.gibbs(X), X.toarray())
assert_almost_equal(rbm1.components_, rbm2.components_)
def test_gibbs_smoke():
""" just seek if we don't get NaNs sampling the full digits dataset """
rng = np.random.RandomState(42)
X = Xdigits
rbm1 = BernoulliRBM(n_components=42, batch_size=10,
n_iter=20, random_state=rng)
rbm1.fit(X)
X_sampled = rbm1.gibbs(X)
assert_all_finite(X_sampled)
def test_score_samples():
"""Check that the pseudo likelihood is computed without clipping.
http://fa.bianp.net/blog/2013/numerical-optimizers-for-logistic-regression/
"""
rng = np.random.RandomState(42)
X = np.vstack([np.zeros(1000), np.ones(1000)])
rbm1 = BernoulliRBM(n_components=10, batch_size=2,
n_iter=10, random_state=rng)
rbm1.fit(X)
assert((rbm1.score_samples(X) < -300).all())
def test_rbm_verbose():
rbm = BernoulliRBM(n_iter=2, verbose=10)
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
rbm.fit(Xdigits)
finally:
sys.stdout = old_stdout
def test_sparse_and_verbose():
"""
Make sure RBM works with sparse input when verbose=True
"""
old_stdout = sys.stdout
sys.stdout = StringIO()
from scipy.sparse import csc_matrix
X = csc_matrix([[0.], [1.]])
rbm = BernoulliRBM(n_components=2, batch_size=2, n_iter=1,
random_state=42, verbose=True)
try:
rbm.fit(X)
s = sys.stdout.getvalue()
# make sure output is sound
assert(re.match(r"Iteration 0, pseudo-likelihood = -?(\d)+(\.\d+)?",
s))
finally:
sio = sys.stdout
sys.stdout = old_stdout
| Eric89GXL/scikit-learn | sklearn/neural_network/tests/test_rbm.py | Python | bsd-3-clause | 4,240 | 0.000236 |
# This program works around the IP limit by recognizing the validation picture.
# URL: http://bbs.csdn.net/human_validations/new
# Input: the human validations page
# Fetch the JPEG from the URL, use picture recognition to extract the string
# from the image, and submit it so the authentication passes.
#
# This version tries to use Selenium to log in.
import re,os,sys
import time
import urllib2
import cookielib
import urllib
from cookielib import CookieJar
import pytesseract
from selenium import webdriver
from PIL import Image,ImageFilter,ImageEnhance
from selenium.webdriver.common import action_chains
from selenium.webdriver.common.keys import Keys
class PicGet:
def image_to_text(self, img):
text = pytesseract.image_to_string(img)
text = re.sub('[\W]', '', text)
return text
def imageToString(self,picname):
image = Image.open(picname)
ValidCode = self.image_to_text(image)
image.save('captcha.png')
return ValidCode
def validlogin(self,driver,cookie,validcode):
# use the recognized valid code to authenticate
PostUrl = "http://bbs.csdn.net/human_validations"
elem = driver.find_element_by_id("captcha")
elem.send_keys(validcode)
elem.send_keys(Keys.TAB)
time.sleep(3)
driver.find_element_by_xpath('//button[@type="submit"]').send_keys(Keys.ENTER)
#submit_button.send_keys(Keys.ENTER)
print "test"
cur_url = driver.current_url
# print (cur_url)
if cur_url == PostUrl:
return True
else:
return False
def validImageGet(self):
AuthUrl = "http://bbs.csdn.net/human_validations/new"
picname = 'captcha.png'
sel = webdriver.Chrome()
sel.get(AuthUrl)
cookie = sel.get_cookies()
auth_token = sel.find_element_by_xpath('//input[@name="authenticity_token"]')
captcha_key = sel.find_element_by_xpath('//input[@id="captcha_key"]')
# submit_button = sel.find_element_by_xpath('//button[@type="submit"]')
# submit_button.submit()
time.sleep(0.3)
picItem = sel.find_element_by_xpath('//img[@alt="captcha"]')
# submit_button = sel.find_element_by_xpath('//button[@type="submit"]')
sel.save_screenshot(picname)
left = int(picItem.location['x'])
top = int(picItem.location['y'])
right = int(picItem.location['x'] + picItem.size['width'])
bottom = int(picItem.location['y'] + picItem.size['height'])
im = Image.open(picname)
# print (left,top,right,bottom)
im = im.crop((left, top, right, bottom))
im.save(picname)
# recognize the valid code from the picture
time.sleep(0.5)
validcode = self.imageToString(picname)
print (validcode)
validcode = "RCNCUB"
#validcode = input("please input:")
if re.match('[A-Z]{6}',validcode):
if self.validlogin(sel,cookie,validcode):
print ('Auth Success!')
else:
print ('Auth Fail!')
#picItem.send_keys(Keys.TAB)
#submit_button.send_keys(Keys.ENTER)
#submit_button.click()
# try:
# submit_button.click()
# except Exception,e:
# print (Exception,":",e)
# validcode = input("please input:")
# if True: # if (len(validcode) == 6) & validcode.isalnum():
# if self.validpost(cookie,auth_token,validcode,captcha_key):# if self.validlogin(sel,cookie,validcode):
# print ('Authentication Pass!')
# break
# else:
# submit_button.click()
time.sleep(5)
sel.quit()
if __name__ == '__main__':
ValidTest = PicGet()
ValidTest.validImageGet()
| zhouqilin1993/IntelliDE | crawler/ForumSpider/ForumSpider/valide/picget.py | Python | gpl-3.0 | 3,264 | 0.034007 |
from django.apps import AppConfig
class CalendarFeedConfig(AppConfig):
name = 'info_display.screens.event_schedule'
verbose_name = 'Event Schedule'
| freieslabor/info-display | info_display/screens/event_schedule/apps.py | Python | mpl-2.0 | 158 | 0 |
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrappers for gsutil, for basic interaction with Google Cloud Storage."""
import cStringIO
import hashlib
import logging
import os
import subprocess
import sys
import tarfile
import urllib2
from telemetry.core import util
PUBLIC_BUCKET = 'chromium-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'
_GSUTIL_URL = 'http://storage.googleapis.com/pub/gsutil.tar.gz'
_DOWNLOAD_PATH = os.path.join(util.GetTelemetryDir(), 'third_party', 'gsutil')
class CloudStorageError(Exception):
@staticmethod
def _GetConfigInstructions(gsutil_path):
if SupportsProdaccess(gsutil_path):
return 'Run prodaccess to authenticate.'
else:
return ('To configure your credentials:\n'
' 1. Run "%s config" and follow its instructions.\n'
' 2. If you have a @google.com account, use that account.\n'
' 3. For the project-id, just enter 0.' % gsutil_path)
class PermissionError(CloudStorageError):
def __init__(self, gsutil_path):
super(PermissionError, self).__init__(
'Attempted to access a file from Cloud Storage but you don\'t '
'have permission. ' + self._GetConfigInstructions(gsutil_path))
class CredentialsError(CloudStorageError):
def __init__(self, gsutil_path):
super(CredentialsError, self).__init__(
'Attempted to access a file from Cloud Storage but you have no '
'configured credentials. ' + self._GetConfigInstructions(gsutil_path))
class NotFoundError(CloudStorageError):
pass
# TODO(tonyg/dtu): Can this be replaced with distutils.spawn.find_executable()?
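# (distutils.spawn.find_executable accepts an explicit search-path string, so
# extra_search_paths could be prepended to os.environ['PATH'] for that.)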
def _FindExecutableInPath(relative_executable_path, *extra_search_paths):
for path in list(extra_search_paths) + os.environ['PATH'].split(os.pathsep):
executable_path = os.path.join(path, relative_executable_path)
if os.path.isfile(executable_path) and os.access(executable_path, os.X_OK):
return executable_path
return None
def _DownloadGsutil():
logging.info('Downloading gsutil')
response = urllib2.urlopen(_GSUTIL_URL)
with tarfile.open(fileobj=cStringIO.StringIO(response.read())) as tar_file:
tar_file.extractall(os.path.dirname(_DOWNLOAD_PATH))
logging.info('Downloaded gsutil to %s' % _DOWNLOAD_PATH)
return os.path.join(_DOWNLOAD_PATH, 'gsutil')
def FindGsutil():
"""Return the gsutil executable path. If we can't find it, download it."""
# Look for a depot_tools installation.
gsutil_path = _FindExecutableInPath(
os.path.join('third_party', 'gsutil', 'gsutil'), _DOWNLOAD_PATH)
if gsutil_path:
return gsutil_path
# Look for a gsutil installation.
gsutil_path = _FindExecutableInPath('gsutil', _DOWNLOAD_PATH)
if gsutil_path:
return gsutil_path
# Failed to find it. Download it!
return _DownloadGsutil()
def SupportsProdaccess(gsutil_path):
def GsutilSupportsProdaccess():
with open(gsutil_path, 'r') as gsutil:
return 'prodaccess' in gsutil.read()
return _FindExecutableInPath('prodaccess') and GsutilSupportsProdaccess()
def _RunCommand(args):
gsutil_path = FindGsutil()
gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = gsutil.communicate()
if gsutil.returncode:
if stderr.startswith((
'You are attempting to access protected data with no configured',
'Failure: No handler was ready to authenticate.')):
raise CredentialsError(gsutil_path)
if 'status=403' in stderr or 'status 403' in stderr:
raise PermissionError(gsutil_path)
if stderr.startswith('InvalidUriError') or 'No such object' in stderr:
raise NotFoundError(stderr)
raise CloudStorageError(stderr)
return stdout
def List(bucket):
query = 'gs://%s/' % bucket
stdout = _RunCommand(['ls', query])
return [url[len(query):] for url in stdout.splitlines()]
def Exists(bucket, remote_path):
try:
_RunCommand(['ls', 'gs://%s/%s' % (bucket, remote_path)])
return True
except NotFoundError:
return False
def Move(bucket1, bucket2, remote_path):
url1 = 'gs://%s/%s' % (bucket1, remote_path)
url2 = 'gs://%s/%s' % (bucket2, remote_path)
logging.info('Moving %s to %s' % (url1, url2))
_RunCommand(['mv', url1, url2])
def Delete(bucket, remote_path):
url = 'gs://%s/%s' % (bucket, remote_path)
logging.info('Deleting %s' % url)
_RunCommand(['rm', url])
def Get(bucket, remote_path, local_path):
url = 'gs://%s/%s' % (bucket, remote_path)
logging.info('Downloading %s to %s' % (url, local_path))
_RunCommand(['cp', url, local_path])
def Insert(bucket, remote_path, local_path, publicly_readable=False):
url = 'gs://%s/%s' % (bucket, remote_path)
command_and_args = ['cp']
extra_info = ''
if publicly_readable:
command_and_args += ['-a', 'public-read']
extra_info = ' (publicly readable)'
command_and_args += [local_path, url]
logging.info('Uploading %s to %s%s' % (local_path, url, extra_info))
_RunCommand(command_and_args)
def GetIfChanged(file_path, bucket=None):
"""Gets the file at file_path if it has a hash file that doesn't match.
If the file is not in Cloud Storage, log a warning instead of raising an
exception. We assume that the user just hasn't uploaded the file yet.
Returns:
True if the binary was changed.
"""
hash_path = file_path + '.sha1'
if not os.path.exists(hash_path):
return False
with open(hash_path, 'rb') as f:
expected_hash = f.read(1024).rstrip()
if os.path.exists(file_path) and GetHash(file_path) == expected_hash:
return False
if bucket:
buckets = [bucket]
else:
buckets = [PUBLIC_BUCKET, INTERNAL_BUCKET]
found = False
for bucket in buckets:
try:
url = 'gs://%s/%s' % (bucket, expected_hash)
_RunCommand(['cp', url, file_path])
logging.info('Downloaded %s to %s' % (url, file_path))
found = True
except NotFoundError:
continue
if not found:
logging.warning('Unable to find file in Cloud Storage: %s', file_path)
return found
def GetHash(file_path):
"""Calculates and returns the hash of the file at file_path."""
sha1 = hashlib.sha1()
with open(file_path, 'rb') as f:
while True:
# Read in 1mb chunks, so it doesn't all have to be loaded into memory.
chunk = f.read(1024*1024)
if not chunk:
break
sha1.update(chunk)
return sha1.hexdigest()
| ChromiumWebApps/chromium | tools/telemetry/telemetry/page/cloud_storage.py | Python | bsd-3-clause | 6,562 | 0.012496 |
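# Illustrative sketch, not part of the Chromium module above. GetIfChanged()
# relies on a "<file>.sha1" sidecar: the binary is only fetched from
# "gs://<bucket>/<hash>" when the sidecar's hash differs from GetHash(file_path).
# The standalone check below mirrors that logic; the file names are hypothetical.
import hashlib
import os

def _needs_download(binary_path):
    hash_path = binary_path + '.sha1'
    if not os.path.exists(hash_path):
        return False  # nothing is tracked in cloud storage for this file
    with open(hash_path, 'rb') as f:
        expected_hash = f.read(1024).rstrip()
    if not os.path.exists(binary_path):
        return True
    sha1 = hashlib.sha1()
    with open(binary_path, 'rb') as f:
        # same 1 MB chunking as GetHash() above
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            sha1.update(chunk)
    return sha1.hexdigest().encode() != expected_hash

# _needs_download('bin/minidump_stackwalk')  # True -> caller downloads gs://<bucket>/<hash>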
from django.db import models
#from Cliente.models import Cliente_Direccion
# Create your models here.
class Tipo_sede(models.Model):
nombre = models.CharField(max_length=50, unique=True)
def __unicode__(self):
return self.nombre
class Meta:
verbose_name = "Tipo de sede"
verbose_name_plural = "Tipos de sede"
class Sede(models.Model):
tipo = models.ForeignKey(Tipo_sede)
sede = models.CharField(max_length=250)
piso = models.IntegerField(default=0)
piso_por_escalera = models.IntegerField(default=0)
numero_ambiente = models.IntegerField(default=1)
#direccion_cliente = models.OneToOneField(Cliente_Direccion)
def __unicode__(self):
return u'%s'%(self.tipo)
class Meta:
verbose_name = "Sede"
verbose_name_plural = "Sedes"
class Tipo_Ambiente(models.Model):
tipo_ambiente = models.CharField(max_length=50, unique=True)
def __unicode__(self):
return self.tipo_ambiente
class Meta:
verbose_name = "Tipo de ambiente"
verbose_name_plural = "Tipos de ambientes"
class Ambiente(models.Model):
ambiente = models.ForeignKey(Tipo_Ambiente)
sede = models.ForeignKey(Sede)
def __unicode__(self):
return u'%s - %s'%(self.ambiente, self.sede)
class Meta:
verbose_name = "Ambiente"
verbose_name_plural = "Ambientes"
| jrmendozat/mtvm | Sede/models.py | Python | gpl-2.0 | 1,385 | 0.005776 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
import lxml
'''
A set of functions related to Data Manager and the <data_manager> node
in Opus Project Configuration files.
'''
def get_tool_nodes(project):
'''
Retrieve a list of all nodes that represent tools in the given project
@param project (OpusProject) project to fetch nodes from
@return a list of nodes representing the tools (list(Element))
'''
tool_nodes = []
tool_group_nodes = get_tool_library_node(project).findall("tool_group")
for tool_group in tool_group_nodes:
tool_nodes.extend(tool_group.findall("tool"))
return tool_nodes
def get_tool_node_by_name(project, tool_name):
'''
Fetch a node representing a tool based in it's name.
@param project (OpusProject) project to fetch node from
@param tool_name (str) name of the tool to fetch
@return the node (Element) or None if the node was not found
'''
for node in get_tool_nodes(project):
if node.get('name') == tool_name:
return node
return None
def get_tool_library_node(project):
'''
Get a reference to the tool library for the given project
@param project (OpusProject) project to operate on
@return the node representing the tool library (Element) or None if the
project does not contain a tool library.
'''
if type(project) == lxml.etree._Element and project.tag == "tool_library": return project
return project.find('data_manager/tool_library')
def get_path_to_tool_modules(project):
'''
Get the path to the tool modules
@param project (OpusProject) project to operate on
@return the text representing the path or None if not found
'''
node = project.find('data_manager/path_to_tool_modules')
if node is not None: return node.text
return None
| apdjustino/DRCOG_Urbansim | src/opus_gui/data_manager/data_manager_functions.py | Python | agpl-3.0 | 1,946 | 0.004111 |
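# Illustrative sketch, not part of the module above. The helpers expect the
# project XML to contain <data_manager><tool_library><tool_group><tool name="..."/>
# nodes. A minimal lxml tree of that shape (element and tool names below are
# made up for illustration):
from lxml import etree

_project_root = etree.fromstring(
    '<opus_project><data_manager><tool_library>'
    '<tool_group name="importers"><tool name="csv_import"/></tool_group>'
    '</tool_library></data_manager></opus_project>'
)
# _project_root.find('data_manager/tool_library') is what get_tool_library_node()
# returns for a real OpusProject; each <tool> element is what get_tool_nodes() and
# get_tool_node_by_name(project, 'csv_import') iterate over.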
"""
This module implements transposition tables, which store positions
and moves to speed up the AI.
"""
import pickle
from easyAI.AI.DictTT import DictTT
class TT:
"""
    A transposition table made out of a Python dictionary.
It can only be used on games which have a method
game.ttentry() -> string, or tuple
Usage:
>>> table = TT(DictTT(1024)) or table = TT() for default dictionary
>>> ai = Negamax(8, scoring, tt = table) # boosted Negamax !
>>> ai(some_game) # computes a move, fills the table
>>> table.to_file('saved_tt.data') # maybe save for later ?
>>> # later...
>>> table = TT.fromfile('saved_tt.data')
>>> ai = Negamax(8, scoring, tt = table) # boosted Negamax !
Transposition tables can also be used as an AI (``AI_player(tt)``)
but they must be exhaustive in this case: if they are asked for
a position that isn't stored in the table, it will lead to an error.
"""
def __init__(self, own_dict = None):
self.d = own_dict if own_dict != None else dict()
def lookup(self, game):
""" Requests the entry in the table. Returns None if the
entry has not been previously stored in the table. """
return self.d.get(game.ttentry(), None)
def __call__(self,game):
"""
This method enables the transposition table to be used
like an AI algorithm. However it will just break if it falls
on some game state that is not in the table. Therefore it is a
better option to use a mixed algorithm like
>>> # negamax boosted with a transposition table !
>>> Negamax(10, tt= my_dictTT)
"""
return self.d[game.ttentry()]['move']
def store(self, **data):
""" Stores an entry into the table """
entry = data.pop("game").ttentry()
self.d[entry] = data
def tofile(self, filename):
""" Saves the transposition table to a file. Warning: the file
        can be big (~100 MB). """
with open(filename, 'w+') as f:
pickle.dump(self, f)
    @staticmethod
    def fromfile(filename):
        """ Loads a transposition table previously saved with
         ``TT.tofile`` """
        with open(filename, 'r') as f:
            return pickle.load(f)
| mrfesol/easyAI | easyAI/AI/TT.py | Python | mit | 2,413 | 0.007874 |
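# Illustrative usage sketch, not shipped with easyAI. TT only needs the game
# object to expose ttentry(); anything hashable works as the key. The stub game
# below is made up purely to show the store()/lookup() round trip used by Negamax.
class _StubGame:
    def ttentry(self):
        return ("some-board-state", 42)

_table = TT()
_table.store(game=_StubGame(), depth=2, value=17, move=3)
print(_table.lookup(_StubGame()))  # {'depth': 2, 'value': 17, 'move': 3}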
"""
WSGI config for freedoge project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "freedoge.settings")
from dj_static import Cling
from django.core.wsgi import get_wsgi_application
application = Cling(get_wsgi_application())
| craigatron/freedoge | freedoge/wsgi.py | Python | mit | 426 | 0.004695 |
# @author: Zhongyuan Sun
# time: O(m + n) with the staircase search (Solution One); the active row-scan is O(m * n) worst case. space: O(1)
class Solution(object):
def searchMatrix(self, matrix, target):
"""
:type matrix: List[List[int]]
:type target: int
:rtype: bool
"""
# Solution One: 122ms, beats 50.00%
# if not matrix or not matrix[0]:
# return False
# m = len(matrix)
# n = len(matrix[0])
# i, j = m - 1, 0
# while i >= 0 and j < n:
# if matrix[i][j] > target:
# i -= 1
# elif matrix[i][j] < target:
# j += 1
# else:
# return True
# return False
# Solution Two: 216ms, beats 21.36%
if not matrix or not matrix[0]:
return False
for line in matrix:
if target in line:
return True
return False
| danielsunzhongyuan/my_leetcode_in_python | search_a_2d_matrix_ii_240.py | Python | apache-2.0 | 895 | 0.001117 |
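# Illustrative sketch, not part of the submission above. Solution One (commented
# out) is the classic "staircase" search: start at the bottom-left cell, move up
# when the cell is too large and right when it is too small, giving O(m + n)
# comparisons. A standalone version for a quick sanity check:
def _staircase_search(matrix, target):
    if not matrix or not matrix[0]:
        return False
    i, j = len(matrix) - 1, 0
    while i >= 0 and j < len(matrix[0]):
        if matrix[i][j] > target:
            i -= 1
        elif matrix[i][j] < target:
            j += 1
        else:
            return True
    return False

print(_staircase_search([[1, 4, 7], [2, 5, 8], [3, 6, 9]], 6))   # True
print(_staircase_search([[1, 4, 7], [2, 5, 8], [3, 6, 9]], 10))  # False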
from __future__ import unicode_literals
from collections import OrderedDict
import hashlib
import os
import posixpath
import re
import json
from django.conf import settings
from django.core.cache import (caches, InvalidCacheBackendError,
cache as default_cache)
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import LazyObject
from django.utils.six.moves.urllib.parse import unquote, urlsplit, urlunsplit, urldefrag
from django.contrib.staticfiles.utils import check_settings, matches_patterns
class StaticFilesStorage(FileSystemStorage):
"""
Standard file system storage for static files.
The defaults for ``location`` and ``base_url`` are
``STATIC_ROOT`` and ``STATIC_URL``.
"""
def __init__(self, location=None, base_url=None, *args, **kwargs):
if location is None:
location = settings.STATIC_ROOT
if base_url is None:
base_url = settings.STATIC_URL
check_settings(base_url)
super(StaticFilesStorage, self).__init__(location, base_url,
*args, **kwargs)
# FileSystemStorage fallbacks to MEDIA_ROOT when location
# is empty, so we restore the empty value.
if not location:
self.base_location = None
self.location = None
def path(self, name):
if not self.location:
raise ImproperlyConfigured("You're using the staticfiles app "
"without having set the STATIC_ROOT "
"setting to a filesystem path.")
return super(StaticFilesStorage, self).path(name)
class HashedFilesMixin(object):
default_template = """url("%s")"""
patterns = (
("*.css", (
r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
(r"""(@import\s*["']\s*(.*?)["'])""", """@import url("%s")"""),
)),
)
def __init__(self, *args, **kwargs):
super(HashedFilesMixin, self).__init__(*args, **kwargs)
self._patterns = OrderedDict()
self.hashed_files = {}
for extension, patterns in self.patterns:
for pattern in patterns:
if isinstance(pattern, (tuple, list)):
pattern, template = pattern
else:
template = self.default_template
compiled = re.compile(pattern, re.IGNORECASE)
self._patterns.setdefault(extension, []).append((compiled, template))
def file_hash(self, name, content=None):
"""
        Returns a hash of the file with the given name and optional content.
"""
if content is None:
return None
md5 = hashlib.md5()
for chunk in content.chunks():
md5.update(chunk)
return md5.hexdigest()[:12]
def hashed_name(self, name, content=None):
parsed_name = urlsplit(unquote(name))
clean_name = parsed_name.path.strip()
opened = False
if content is None:
if not self.exists(clean_name):
raise ValueError("The file '%s' could not be found with %r." %
(clean_name, self))
try:
content = self.open(clean_name)
except IOError:
# Handle directory paths and fragments
return name
opened = True
try:
file_hash = self.file_hash(clean_name, content)
finally:
if opened:
content.close()
path, filename = os.path.split(clean_name)
root, ext = os.path.splitext(filename)
if file_hash is not None:
file_hash = ".%s" % file_hash
hashed_name = os.path.join(path, "%s%s%s" %
(root, file_hash, ext))
unparsed_name = list(parsed_name)
unparsed_name[2] = hashed_name
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
if '?#' in name and not unparsed_name[3]:
unparsed_name[2] += '?'
return urlunsplit(unparsed_name)
def url(self, name, force=False):
"""
Returns the real URL in DEBUG mode.
"""
if settings.DEBUG and not force:
hashed_name, fragment = name, ''
else:
clean_name, fragment = urldefrag(name)
if urlsplit(clean_name).path.endswith('/'): # don't hash paths
hashed_name = name
else:
hashed_name = self.stored_name(clean_name)
final_url = super(HashedFilesMixin, self).url(hashed_name)
# Special casing for a @font-face hack, like url(myfont.eot?#iefix")
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
query_fragment = '?#' in name # [sic!]
if fragment or query_fragment:
urlparts = list(urlsplit(final_url))
if fragment and not urlparts[4]:
urlparts[4] = fragment
if query_fragment and not urlparts[3]:
urlparts[2] += '?'
final_url = urlunsplit(urlparts)
return unquote(final_url)
def url_converter(self, name, template=None):
"""
Returns the custom URL converter for the given file name.
"""
if template is None:
template = self.default_template
def converter(matchobj):
"""
Converts the matched URL depending on the parent level (`..`)
and returns the normalized and hashed URL using the url method
of the storage.
"""
matched, url = matchobj.groups()
# Completely ignore http(s) prefixed URLs,
# fragments and data-uri URLs
if url.startswith(('#', 'http:', 'https:', 'data:', '//')):
return matched
name_parts = name.split(os.sep)
# Using posix normpath here to remove duplicates
url = posixpath.normpath(url)
url_parts = url.split('/')
parent_level, sub_level = url.count('..'), url.count('/')
if url.startswith('/'):
sub_level -= 1
url_parts = url_parts[1:]
if parent_level or not url.startswith('/'):
start, end = parent_level + 1, parent_level
else:
if sub_level:
if sub_level == 1:
parent_level -= 1
start, end = parent_level, 1
else:
start, end = 1, sub_level - 1
joined_result = '/'.join(name_parts[:-start] + url_parts[end:])
hashed_url = self.url(unquote(joined_result), force=True)
file_name = hashed_url.split('/')[-1:]
relative_url = '/'.join(url.split('/')[:-1] + file_name)
# Return the hashed version to the file
return template % unquote(relative_url)
return converter
def post_process(self, paths, dry_run=False, **options):
"""
Post process the given OrderedDict of files (called from collectstatic).
Processing is actually two separate operations:
1. renaming files to include a hash of their content for cache-busting,
and copying those files to the target storage.
2. adjusting files which contain references to other files so they
refer to the cache-busting filenames.
If either of these are performed on a file, then that file is considered
post-processed.
"""
# don't even dare to process the files if we're in dry run mode
if dry_run:
return
# where to store the new paths
hashed_files = OrderedDict()
# build a list of adjustable files
matches = lambda path: matches_patterns(path, self._patterns.keys())
adjustable_paths = [path for path in paths if matches(path)]
# then sort the files by the directory level
path_level = lambda name: len(name.split(os.sep))
for name in sorted(paths.keys(), key=path_level, reverse=True):
# use the original, local file, not the copied-but-unprocessed
# file, which might be somewhere far away, like S3
storage, path = paths[name]
with storage.open(path) as original_file:
# generate the hash with the original content, even for
# adjustable files.
hashed_name = self.hashed_name(name, original_file)
# then get the original's file content..
if hasattr(original_file, 'seek'):
original_file.seek(0)
hashed_file_exists = self.exists(hashed_name)
processed = False
# ..to apply each replacement pattern to the content
if name in adjustable_paths:
content = original_file.read().decode(settings.FILE_CHARSET)
for patterns in self._patterns.values():
for pattern, template in patterns:
converter = self.url_converter(name, template)
try:
content = pattern.sub(converter, content)
except ValueError as exc:
yield name, None, exc
if hashed_file_exists:
self.delete(hashed_name)
# then save the processed result
content_file = ContentFile(force_bytes(content))
saved_name = self._save(hashed_name, content_file)
hashed_name = force_text(self.clean_name(saved_name))
processed = True
else:
# or handle the case in which neither processing nor
# a change to the original file happened
if not hashed_file_exists:
processed = True
saved_name = self._save(hashed_name, original_file)
hashed_name = force_text(self.clean_name(saved_name))
# and then set the cache accordingly
hashed_files[self.hash_key(name)] = hashed_name
yield name, hashed_name, processed
# Finally store the processed paths
self.hashed_files.update(hashed_files)
def clean_name(self, name):
return name.replace('\\', '/')
def hash_key(self, name):
return name
def stored_name(self, name):
hash_key = self.hash_key(name)
cache_name = self.hashed_files.get(hash_key)
if cache_name is None:
cache_name = self.clean_name(self.hashed_name(name))
# store the hashed name if there was a miss, e.g.
# when the files are still processed
self.hashed_files[hash_key] = cache_name
return cache_name
class ManifestFilesMixin(HashedFilesMixin):
manifest_version = '1.0' # the manifest format standard
manifest_name = 'staticfiles.json'
def __init__(self, *args, **kwargs):
super(ManifestFilesMixin, self).__init__(*args, **kwargs)
self.hashed_files = self.load_manifest()
def read_manifest(self):
try:
with self.open(self.manifest_name) as manifest:
return manifest.read().decode('utf-8')
except IOError:
return None
def load_manifest(self):
content = self.read_manifest()
if content is None:
return OrderedDict()
try:
stored = json.loads(content, object_pairs_hook=OrderedDict)
except ValueError:
pass
else:
version = stored.get('version', None)
if version == '1.0':
return stored.get('paths', OrderedDict())
raise ValueError("Couldn't load manifest '%s' (version %s)" %
(self.manifest_name, self.manifest_version))
def post_process(self, *args, **kwargs):
self.hashed_files = OrderedDict()
all_post_processed = super(ManifestFilesMixin,
self).post_process(*args, **kwargs)
for post_processed in all_post_processed:
yield post_processed
self.save_manifest()
def save_manifest(self):
payload = {'paths': self.hashed_files, 'version': self.manifest_version}
if self.exists(self.manifest_name):
self.delete(self.manifest_name)
contents = json.dumps(payload).encode('utf-8')
self._save(self.manifest_name, ContentFile(contents))
class _MappingCache(object):
"""
A small dict-like wrapper for a given cache backend instance.
"""
def __init__(self, cache):
self.cache = cache
def __setitem__(self, key, value):
self.cache.set(key, value)
def __getitem__(self, key):
value = self.cache.get(key, None)
if value is None:
raise KeyError("Couldn't find a file name '%s'" % key)
return value
def clear(self):
self.cache.clear()
def update(self, data):
self.cache.set_many(data)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
class CachedFilesMixin(HashedFilesMixin):
def __init__(self, *args, **kwargs):
super(CachedFilesMixin, self).__init__(*args, **kwargs)
try:
self.hashed_files = _MappingCache(caches['staticfiles'])
except InvalidCacheBackendError:
# Use the default backend
self.hashed_files = _MappingCache(default_cache)
def hash_key(self, name):
key = hashlib.md5(force_bytes(self.clean_name(name))).hexdigest()
return 'staticfiles:%s' % key
class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
"""
A static file system storage backend which also saves
hashed copies of the files it saves.
"""
pass
class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
"""
A static file system storage backend which also saves
hashed copies of the files it saves.
"""
pass
class ConfiguredStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)()
staticfiles_storage = ConfiguredStorage()
| rooshilp/CMPUT410Lab6 | virt_env/virt1/lib/python2.7/site-packages/django/contrib/staticfiles/storage.py | Python | apache-2.0 | 14,802 | 0.000608 |
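# Illustrative sketch, not part of Django. HashedFilesMixin.file_hash() keeps the
# first 12 hex characters of an MD5 over the file content, and hashed_name()
# splices that hash into the name before the extension. Reduced to plain hashlib
# (the file name and content below are made up):
import hashlib
import os

_content = b'body { color: #333; }\n'
_digest = hashlib.md5(_content).hexdigest()[:12]
_root, _ext = os.path.splitext('css/site.css')
print('%s.%s%s' % (_root, _digest, _ext))  # -> css/site.<12 hex chars>.css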
__author__ = 'bruno'
import unittest
import algorithms.math.abacus as Abacus
class TestAbacus(unittest.TestCase):
def setUp(self):
pass
def test_abacus1(self):
abacus = Abacus.generate_abacus(0)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |'], abacus)
def test_abacus2(self):
abacus = Abacus.generate_abacus(8)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00 000*****|'], abacus)
def test_abacus3(self):
abacus = Abacus.generate_abacus(32)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000** ***|',
'|00000*** **|'], abacus)
def test_abacus4(self):
abacus = Abacus.generate_abacus(147)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000* ****|',
'|000 00*****|'], abacus)
def test_abacus5(self):
abacus = Abacus.generate_abacus(986)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|0 0000*****|',
'|00 000*****|',
'|0000 0*****|'], abacus)
def test_abacus6(self):
abacus = Abacus.generate_abacus(5821)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000 *****|',
'|00 000*****|',
'|00000*** **|',
'|00000**** *|'], abacus)
def test_abacus7(self):
abacus = Abacus.generate_abacus(1234)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000*** **|',
'|00000** ***|',
'|00000* ****|'], abacus)
def test_abacus8(self):
abacus = Abacus.generate_abacus(999)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|0 0000*****|',
'|0 0000*****|',
'|0 0000*****|'], abacus)
def test_abacus9(self):
abacus = Abacus.generate_abacus(13)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000** ***|'], abacus)
def test_abacus10(self):
abacus = Abacus.generate_abacus(49)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000* ****|',
'|0 0000*****|'], abacus) | bnsantos/python-junk-code | tests/math/abacusTest.py | Python | gpl-2.0 | 5,476 | 0.000183 |
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2015 by Jan-Hendrik Dolling.
:license: Apache 2.0, see LICENSE for more details.
"""
import abc
import six
import json
import logging
from six import string_types
from collections import namedtuple
from configvalidator.tools.exceptions import LoadException, ValidatorException
from configvalidator.tools.parser import ParseObj
logger = logging.getLogger(__name__)
DATA_SECTION_FEATURE = {}
DATA_OPTION_FEATURE = {}
DATA_VALIDATOR = {}
GLOBAL_DATA = {}
def add_data(key, value):
"""Add a value to the global data store
Validators and Features can access this data.
If you create an object an *data* attribute is automatic added to the instance.
This data attribute hold all information that there president during initialization.
So it's possible to add additional meta data to Validators and Features.
Args:
key: The key under which that information is stored.
value: The information
"""
GLOBAL_DATA[key] = value
def remove_data(key):
"""remove a value from the global data store
This removes the data only for new instances.
The information remain available under the key for existing instances.
Args:
key: The key under which that information is stored.
"""
del GLOBAL_DATA[key]
def load_validator(validator_name):
"""loads a validator class
Args:
validator_name: the validator name
Returns:
A validator class which than can be instanced
Raises:
KeyError: iff the validator_name is unknown
"""
try:
return DATA_VALIDATOR[validator_name]
except KeyError:
raise LoadException("no validator with the name {name}".format(name=validator_name))
def load_section_feature(feature_name):
try:
return DATA_SECTION_FEATURE[feature_name]
except KeyError:
raise LoadException(
"no Section feature with the name {name}".format(name=feature_name))
def load_option_feature(feature_name):
try:
return DATA_OPTION_FEATURE[feature_name]
except KeyError:
raise LoadException(
"no option feature with the name {name}".format(name=feature_name))
def load_validator_form_dict(option_dict):
validator_class_name = "default"
validator_class_dict = {}
if isinstance(option_dict, dict) and "validator" in option_dict and option_dict["validator"] is not None:
if isinstance(option_dict["validator"], string_types):
validator_class_name = option_dict["validator"]
else:
validator_class_dict = option_dict["validator"]
if "type" in validator_class_dict:
validator_class_name = validator_class_dict["type"]
del validator_class_dict["type"]
return load_validator(validator_class_name), validator_class_dict
def list_objects():
return dict(validators=[x for x in DATA_VALIDATOR],
option_features=[x for x in DATA_OPTION_FEATURE],
section_features=[x for x in DATA_SECTION_FEATURE])
def decorate_fn(func):
def with_check_input_is_string(self, value):
if not isinstance(value, string_types):
raise ValidatorException("input must be a string.")
return func(self, value)
return with_check_input_is_string
class CollectMetaclass(abc.ABCMeta):
"""Metaclass which safes the class, so that the loads methods can find them.
all classes with this metaclass are automatically collected
The then can be accessed with there name (which is the class attribute
name or the class name if the class has no attribute entry_name)
"""
def __init__(self, name, bases, dct):
"""
        Called when a new class is created.
        The method sets the "name" attribute if not set.
        If the attribute "inactive" is not True, the class
        is sorted into the module registries:
        - Validator into DATA_VALIDATOR
        - SectionFeature / OptionFeature into DATA_SECTION_FEATURE / DATA_OPTION_FEATURE
"""
super(CollectMetaclass, self).__init__(name, bases, dct)
if object in bases:
# skip base classes
return
if "name" not in dct:
self.name = name
if "inactive" not in dct or dct["inactive"] is not True:
if issubclass(self, Validator):
# only string input for validator functions
self.validate = decorate_fn(self.validate)
DATA_VALIDATOR[self.name] = self
if issubclass(self, SectionFeature):
DATA_SECTION_FEATURE[self.name] = self
if issubclass(self, OptionFeature):
DATA_OPTION_FEATURE[self.name] = self
def __call__(self, *args, **kwargs):
pars_obj = None
if len(args) > 0 and isinstance(args[0], ParseObj):
pars_obj = args[0]
args = args[1:]
res = self.__new__(self, *args, **kwargs)
if isinstance(res, self):
res.data = dict(GLOBAL_DATA)
if pars_obj is not None:
res.data.update(pars_obj.context_data)
res.__init__(*args, **kwargs)
return res
@six.add_metaclass(CollectMetaclass)
class Validator(object):
"""Superclass for Validator's
If you want to write your own Validator use this Superclass.
For Attribute information see Entry class.
a instance lives in one section/option from ini_validator dict
"""
@abc.abstractmethod
def validate(self, value):
"""determine if one input satisfies this validator.
IMPORTAND:
The input is always are String
Args:
value (String): the value to check if it suffused this Validator
Returns:
True or False dependent of if the input suffused the Validator.
"""
@six.add_metaclass(CollectMetaclass)
class SectionFeature(object):
def __init__(self, **kwargs):
"""
:param kwargs: parameter will be ignored
:return:
"""
@abc.abstractmethod
def parse_section(self, parse_obj, section_dict):
"""
:param parse_obj: parser object which stores the data
:param section_dict: the configuration dict for the current section
:return:
"""
@six.add_metaclass(CollectMetaclass)
class OptionFeature(object):
def __init__(self, **kwargs):
"""
:param kwargs: parameter will be ignored
:return:
"""
@abc.abstractmethod
def parse_option(self, parse_obj, option_dict):
"""
:param parse_obj: parser object which stores the data
:param option_dict: the configuration dict for the current option
:return:
"""
| JanHendrikDolling/configvalidator | configvalidator/tools/basics.py | Python | apache-2.0 | 6,745 | 0.00089 |
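# Illustrative usage sketch, not part of the shipped package. Any concrete
# Validator subclass is auto-registered by CollectMetaclass under its class name
# (or its "name" attribute) and can then be fetched with load_validator().
# DigitsValidator is a made-up example:
class DigitsValidator(Validator):
    """Accepts strings consisting only of digits."""
    def validate(self, value):
        return value.isdigit()

_validator = load_validator("DigitsValidator")()
print(_validator.validate("12345"))  # True
print(_validator.validate("12a45"))  # False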
import unittest
from coalib.results.Result import Result
from coalib.results.result_actions.ResultAction import ResultAction
from coalib.settings.Section import Section
class ResultActionTest(unittest.TestCase):
def test_api(self):
uut = ResultAction()
result = Result('', '')
self.assertRaises(NotImplementedError, uut.apply, 5, {}, {})
self.assertRaises(NotImplementedError,
uut.apply_from_section,
'',
{},
{},
Section('name'))
self.assertRaises(TypeError, uut.apply_from_section, '', {}, {}, 5)
self.assertRaises(TypeError,
uut.apply_from_section,
'',
5,
{},
Section('name'))
self.assertRaises(TypeError,
uut.apply_from_section,
'',
{},
5,
Section('name'))
self.assertEqual(len(uut.get_metadata().non_optional_params), 0)
self.assertEqual(len(uut.get_metadata().optional_params), 0)
self.assertEqual(uut.get_metadata().name, 'ResultAction')
self.assertTrue(uut.is_applicable(result, None, None))
| refeed/coala | tests/results/result_actions/ResultActionTest.py | Python | agpl-3.0 | 1,388 | 0 |
#!/usr/bin/python
import os, re,sys
from remote_exe import create_thread
from subprocess import Popen, PIPE
from main import base_fun
fio_cmd = "fio --name=global --ioengine=sync --bs=4k --rw=read --filename=/dev/{0} --runtime={1} --direct=1 -numjobs=1 -iodepth=4 --name=job"
stress_time = 60
class FIO_FUN(base_fun):
def __init__(self):
super(FIO_FUN, self).__init__()
def get_all_nvme(self):
self.nvme_list=[]
dev_list = os.listdir("/dev/")
#dev_list =['kmsg','stdin','nvme0','nvme0n1', 'nvme1','nvme10','nvme10n1','nvme11','nvme11n1']
p= re.compile(r'nvme\d+n\d')
for dev in dev_list:
match = p.search(dev)
if match:
self.nvme_list.append(dev)
return self.nvme_list
def run(self):
#argv_list = [{'log_name': 'log_path', 'command_line':'fio_testcommnd'},]
print >>sys.stderr,"Start Running"
self.get_all_nvme()
argv_list=[]
for nvme in self.nvme_list:
argv= dict()
argv.update(log_name= nvme)
command = fio_cmd.format(nvme,stress_time)
argv.update(command_line = command)
argv_list.append(argv)
create_thread(argv_list)
return "command executed"
| trelay/multi-executor | main/main_fio.py | Python | mit | 1,282 | 0.013261 |
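# Illustrative note, not part of the script above. Each worker thread just runs
# fio_cmd filled in with one NVMe namespace and the shared runtime; for a
# hypothetical /dev/nvme0n1 and the default 60 s runtime:
print(fio_cmd.format("nvme0n1", stress_time))
# -> fio --name=global --ioengine=sync --bs=4k --rw=read --filename=/dev/nvme0n1
#        --runtime=60 --direct=1 -numjobs=1 -iodepth=4 --name=job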
from bacpypes.apdu import SubscribeCOVRequest, SimpleAckPDU, RejectPDU, AbortPDU
from bacpypes.iocb import IOCB
from bacpypes.core import deferred
from bacpypes.pdu import Address
from bacpypes.object import get_object_class, get_datatype
from bacpypes.constructeddata import Array
from bacpypes.primitivedata import Tag, ObjectIdentifier, Unsigned
from BAC0.core.io.Read import cast_datatype_from_tag
"""
Using COV, we build a "context" which is turned into a subscription sent to
the destination.
Once the IOCB completes, the callback attached to it (subscription_acknowledged)
executes and we'll get the answer.
"""
class SubscriptionContext:
next_proc_id = 1
def __init__(self, address, objectID, confirmed=None, lifetime=None, callback=None):
self.address = address
self.subscriberProcessIdentifier = SubscriptionContext.next_proc_id
SubscriptionContext.next_proc_id += 1
self.monitoredObjectIdentifier = objectID
self.issueConfirmedNotifications = confirmed
self.lifetime = lifetime
self.callback = callback
def cov_notification(self, apdu):
# make a rash assumption that the property value is going to be
# a single application encoded tag
source = apdu.pduSource
object_changed = apdu.monitoredObjectIdentifier
elements = {
"source": source,
"object_changed": object_changed,
"properties": {},
}
for element in apdu.listOfValues:
prop_id = element.propertyIdentifier
datatype = get_datatype(object_changed[0], prop_id)
value = element.value
if not datatype:
value = cast_datatype_from_tag(
element.value, object_changed[0], prop_id
)
else:
# special case for array parts, others are managed by cast_out
if issubclass(datatype, Array) and (
element.propertyArrayIndex is not None
):
if element.propertyArrayIndex == 0:
value = element.value.cast_out(Unsigned)
else:
value = element.value.cast_out(datatype.subtype)
else:
value = element.value.cast_out(datatype)
elements["properties"][prop_id] = value
return elements
class CoV:
"""
Mixin to support COV registration
"""
def send_cov_subscription(self, request):
self._log.debug("Request : {}".format(request))
iocb = IOCB(request)
self._log.debug("IOCB : {}".format(iocb))
iocb.add_callback(self.subscription_acknowledged)
# pass to the BACnet stack
deferred(self.this_application.request_io, iocb)
def subscription_acknowledged(self, iocb):
if iocb.ioResponse:
self._log.info("Subscription success")
if iocb.ioError:
self._log.error("Subscription failed. {}".format(iocb.ioError))
def cov(self, address, objectID, confirmed=True, lifetime=0, callback=None):
address = Address(address)
context = self._build_cov_context(
address, objectID, confirmed=confirmed, lifetime=lifetime, callback=callback
)
request = self._build_cov_request(context)
self.send_cov_subscription(request)
def cancel_cov(self, address, objectID, callback=None):
address = Address(address)
context = self._build_cov_context(
address, objectID, confirmed=None, lifetime=None, callback=callback
)
request = self._build_cov_request(context)
self.send_cov_subscription(request)
def _build_cov_context(
self, address, objectID, confirmed=True, lifetime=None, callback=None
):
context = SubscriptionContext(
address=address,
objectID=objectID,
confirmed=confirmed,
lifetime=lifetime,
callback=callback,
)
self.subscription_contexts[context.subscriberProcessIdentifier] = context
if "context_callback" not in self.subscription_contexts.keys():
self.subscription_contexts["context_callback"] = self.context_callback
return context
def _build_cov_request(self, context):
request = SubscribeCOVRequest(
subscriberProcessIdentifier=context.subscriberProcessIdentifier,
monitoredObjectIdentifier=context.monitoredObjectIdentifier,
)
request.pduDestination = context.address
# optional parameters
if context.issueConfirmedNotifications is not None:
request.issueConfirmedNotifications = context.issueConfirmedNotifications
if context.lifetime is not None:
request.lifetime = context.lifetime
return request
# def context_callback(self, elements, callback=None):
def context_callback(self, elements):
self._log.info("Received COV Notification for {}".format(elements))
# if callback:
# callback()
for device in self.registered_devices:
if str(device.properties.address) == str(elements["source"]):
device[elements["object_changed"]].cov_registered = True
for prop, value in elements["properties"].items():
if prop == "presentValue":
device[elements["object_changed"]]._trend(value)
else:
device[elements["object_changed"]].properties.bacnet_properties[
prop
] = value
break
| ChristianTremblay/BAC0 | BAC0/core/functions/cov.py | Python | lgpl-3.0 | 5,711 | 0.001926 |
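# Illustrative sketch, not part of BAC0. Every SubscriptionContext takes the next
# subscriberProcessIdentifier from the class counter, which is what keeps
# concurrent COV subscriptions apart. The address/object values below are
# placeholders.
_ctx_a = SubscriptionContext("10.0.0.5", ("analogInput", 1), confirmed=True, lifetime=0)
_ctx_b = SubscriptionContext("10.0.0.5", ("analogValue", 2), confirmed=True, lifetime=0)
print(_ctx_a.subscriberProcessIdentifier, _ctx_b.subscriberProcessIdentifier)  # e.g. 1 2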
# Copyright (c) 2017 Orange. # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_versionedobjects import fields as obj_fields
from networking_bgpvpn.neutron.db import bgpvpn_db
from neutron.api.rpc.callbacks import resources
from neutron.objects import base
from neutron.objects.ports import IPAllocation
from neutron.objects.ports import Port
from neutron.objects.router import RouterPort
from neutron.objects.subnet import Subnet
from neutron_lib.api.definitions import bgpvpn as bgpvpn_api
from neutron_lib.api.definitions import bgpvpn_routes_control as bgpvpn_rc_api
from neutron_lib import constants
from neutron_lib.objects import common_types
from neutron_lib.utils import net as net_utils
LOG = logging.getLogger(__name__)
def _get_gateway_mac_by_subnet(obj_context, subnet):
if not subnet.gateway_ip:
LOG.error("no gateway IP defined for subnet %s", subnet)
return None
ip_allocation = IPAllocation.get_object(obj_context,
network_id=subnet.network_id,
subnet_id=subnet.id,
ip_address=subnet.gateway_ip)
# pylint: disable=no-member
if ip_allocation:
port = Port.get_object(obj_context, id=ip_allocation.port_id)
return str(port.mac_address)
else:
LOG.debug("no port allocated to gateway IP for subnet %s", subnet.id)
return None
def _get_subnets_info(obj_context, net_id):
subnets = Subnet.get_objects(obj_context, network_id=net_id)
return [
{'ip_version': subnet.ip_version,
'id': subnet.id,
'cidr': subnet.cidr,
'gateway_ip': subnet.gateway_ip,
'gateway_mac': _get_gateway_mac_by_subnet(obj_context, subnet)
}
for subnet in subnets
]
class BGPVPNTypeField(obj_fields.AutoTypedField):
AUTO_TYPE = obj_fields.Enum(valid_values=bgpvpn_api.BGPVPN_TYPES)
@base.NeutronObjectRegistry.register
class BGPVPN(base.NeutronDbObject):
# Version 1.0: Initial version
VERSION = '1.0'
new_facade = True
db_model = bgpvpn_db.BGPVPN
fields = {
'id': common_types.UUIDField(),
'project_id': obj_fields.StringField(),
'type': BGPVPNTypeField(),
'name': obj_fields.StringField(nullable=True,
default=None),
'route_targets': obj_fields.ListOfStringsField(nullable=True,
default=[]),
'import_targets': obj_fields.ListOfStringsField(nullable=True,
default=[]),
'export_targets': obj_fields.ListOfStringsField(nullable=True,
default=[]),
'route_distinguishers': obj_fields.ListOfStringsField(nullable=True,
default=[]),
'local_pref': obj_fields.IntegerField(nullable=True),
'vni': obj_fields.IntegerField(nullable=True),
}
fields_no_update = ['id',
'project_id',
'type',
'port_id']
foreign_keys = {'BGPVPNNetAssociation': {'id': 'bgpvpn_id'},
'BGPVPNRouterAssociation': {'id': 'bgpvpn_id'},
'BGPVPNPortAssociation': {'id': 'bgpvpn_id'},
'BGPVPNPortAssociationRoute': {'id': 'bgpvpn_id'},
}
@classmethod
def modify_fields_from_db(cls, db_obj):
result = super(BGPVPN, cls).modify_fields_from_db(db_obj)
for field in ['route_targets',
'import_targets',
'export_targets',
'route_distinguishers']:
if field in result:
result[field] = (result[field].split(',')
if result[field] else [])
return result
@classmethod
def modify_fields_to_db(cls, fields):
result = super(BGPVPN, cls).modify_fields_to_db(fields)
for field in ['route_targets',
'import_targets',
'export_targets',
'route_distinguishers']:
if field in result:
result[field] = ','.join(result.get(field, []))
return result
@base.NeutronObjectRegistry.register
class BGPVPNNetAssociation(base.NeutronDbObject):
# Version 1.0: Initial version
VERSION = '1.0'
new_facade = True
db_model = bgpvpn_db.BGPVPNNetAssociation
fields = {
'id': common_types.UUIDField(),
'project_id': obj_fields.StringField(),
'bgpvpn_id': obj_fields.StringField(),
'bgpvpn': obj_fields.ObjectField('BGPVPN'),
'network_id': obj_fields.StringField(),
'subnets': common_types.ListOfDictOfMiscValuesField(nullable=True)
}
fields_no_update = ['id',
'project_id',
'bgpvpn_id',
'network_id']
synthetic_fields = ['bgpvpn',
'subnets']
def __init__(self, context=None, **kwargs):
super(BGPVPNNetAssociation, self).__init__(context, **kwargs)
def create(self):
with self.db_context_writer(self.obj_context):
super(BGPVPNNetAssociation, self).create()
self.obj_load_attr('subnets')
def obj_load_attr(self, attrname):
if attrname == 'subnets':
self._load_subnets()
else:
super(BGPVPNNetAssociation, self).obj_load_attr(attrname)
def _load_subnets(self, db_obj=None):
# pylint: disable=no-member
subnets_info = _get_subnets_info(self.obj_context, self.network_id)
setattr(self, 'subnets', subnets_info)
self.obj_reset_changes(['subnets'])
def from_db_object(self, obj):
super(BGPVPNNetAssociation, self).from_db_object(obj)
self._load_subnets(obj)
def all_subnets(self, network_id):
# pylint: disable=no-member
return self.subnets
@base.NeutronObjectRegistry.register
class BGPVPNRouterAssociation(base.NeutronDbObject):
# Version 1.0: Initial version
VERSION = '1.0'
new_facade = True
db_model = bgpvpn_db.BGPVPNRouterAssociation
fields = {
'id': common_types.UUIDField(),
'project_id': obj_fields.StringField(),
'bgpvpn_id': obj_fields.StringField(),
'bgpvpn': obj_fields.ObjectField('BGPVPN'),
'router_id': obj_fields.StringField(),
'connected_networks':
common_types.ListOfDictOfMiscValuesField(nullable=True)
}
fields_no_update = ['id',
'project_id',
'bgpvpn_id',
'router_id']
synthetic_fields = ['bgpvpn',
'connected_networks']
def __init__(self, context=None, **kwargs):
super(BGPVPNRouterAssociation, self).__init__(context, **kwargs)
def create(self):
with self.db_context_writer(self.obj_context):
super(BGPVPNRouterAssociation, self).create()
self.obj_load_attr('connected_networks')
def update(self):
with self.db_context_writer(self.obj_context):
if 'connected_networks' in self.obj_what_changed():
self.obj_load_attr('connected_networks')
super(BGPVPNRouterAssociation, self).update()
def obj_load_attr(self, attrname):
if attrname == 'connected_networks':
return self._load_connected_networks()
super(BGPVPNRouterAssociation, self).obj_load_attr(attrname)
@classmethod
def get_objects(cls, context, _pager=None, validate_filters=True,
**kwargs):
if 'network_id' in kwargs and 'router_id' not in kwargs:
ports = Port.get_objects(
context,
network_id=kwargs.pop('network_id'),
device_owner=constants.DEVICE_OWNER_ROUTER_INTF)
router_assocs = []
for port in ports:
# pylint: disable=no-member
router_assocs.extend(
super(BGPVPNRouterAssociation, cls).get_objects(
context, _pager=_pager,
validate_filters=validate_filters,
router_id=RouterPort.get_object(
context, port_id=port.id).router_id,
**kwargs)
)
return router_assocs
return super(BGPVPNRouterAssociation, cls).get_objects(
context, _pager=_pager, validate_filters=validate_filters,
**kwargs)
# pylint: disable=no-member
def _load_connected_networks(self, db_obj=None):
# NOTE(tmorin): can be improved by directly looking up
# Ports with device_id=self.router_id
router_ports = RouterPort.get_objects(
self.obj_context,
router_id=self.router_id)
connected_networks = []
for router_port in router_ports:
port = Port.get_object(self.obj_context,
id=router_port.port_id)
if port:
# router gateway networks are not considered as requiring
# to be bound to BGPVPNs
if port.device_owner == constants.DEVICE_OWNER_ROUTER_GW:
LOG.debug("skipping port %s, because router gateway",
port.id)
continue
connected_networks.append({
'network_id': port.network_id,
'subnets': _get_subnets_info(self.obj_context,
port.network_id)
})
else:
LOG.warning("Couldn't find Port for RouterPort (router:%s,"
"port:%s)", router_port.router_id,
router_port.port_id)
setattr(self, 'connected_networks', connected_networks)
self.obj_reset_changes(['connected_networks'])
def from_db_object(self, obj):
super(BGPVPNRouterAssociation, self).from_db_object(obj)
self._load_connected_networks(obj)
def all_subnets(self, network_id):
# pylint: disable=no-member
for connected_net in self.connected_networks:
if connected_net['network_id'] == network_id:
return connected_net['subnets']
return []
@base.NeutronObjectRegistry.register
class BGPVPNPortAssociation(base.NeutronDbObject):
# Version 1.0: Initial version
VERSION = '1.0'
new_facade = True
db_model = bgpvpn_db.BGPVPNPortAssociation
fields = {
'id': common_types.UUIDField(),
'project_id': obj_fields.StringField(),
'bgpvpn_id': obj_fields.StringField(),
'bgpvpn': obj_fields.ObjectField('BGPVPN'),
'port_id': obj_fields.StringField(),
'subnets': common_types.ListOfDictOfMiscValuesField(nullable=True),
'routes': obj_fields.ListOfObjectsField('BGPVPNPortAssociationRoute'),
'advertise_fixed_ips': obj_fields.BooleanField(default=True)
}
fields_no_update = ['id',
'project_id',
'bgpvpn_id',
'port_id']
synthetic_fields = ['bgpvpn',
'subnets',
'routes']
def __init__(self, context=None, **kwargs):
super(BGPVPNPortAssociation, self).__init__(context, **kwargs)
def create(self):
with self.db_context_writer(self.obj_context):
super(BGPVPNPortAssociation, self).create()
self.obj_load_attr('subnets')
def obj_load_attr(self, attrname):
if attrname == 'subnets':
self._load_subnets()
else:
super(BGPVPNPortAssociation, self).obj_load_attr(attrname)
def _load_subnets(self, db_obj=None):
# pylint: disable=no-member
port = Port.get_object(self.obj_context, id=self.port_id)
subnets_info = _get_subnets_info(self.obj_context, port.network_id)
setattr(self, 'subnets', subnets_info)
self.obj_reset_changes(['subnets'])
def from_db_object(self, obj):
super(BGPVPNPortAssociation, self).from_db_object(obj)
self._load_subnets(obj)
def all_subnets(self, network_id):
# pylint: disable=no-member
return self.subnets
class BGPVPNPortAssociationRouteTypeField(obj_fields.AutoTypedField):
AUTO_TYPE = obj_fields.Enum(valid_values=bgpvpn_rc_api.ROUTE_TYPES)
@base.NeutronObjectRegistry.register
class BGPVPNPortAssociationRoute(base.NeutronDbObject):
# Version 1.0: Initial version
VERSION = '1.0'
new_facade = True
db_model = bgpvpn_db.BGPVPNPortAssociationRoute
fields = {
'id': common_types.UUIDField(),
'port_association_id': common_types.UUIDField(),
'type': BGPVPNPortAssociationRouteTypeField(),
'prefix': common_types.IPNetworkField(nullable=True,
default=None),
'local_pref': obj_fields.IntegerField(nullable=True),
'bgpvpn_id': obj_fields.StringField(nullable=True,
default=None),
'bgpvpn': obj_fields.ObjectField('BGPVPN',
nullable=True,
default=None),
}
fields_no_update = fields.keys()
foreign_keys = {'BGPVPNPortAssociation': {'port_association_id': 'id'},
'BGPVPN': {'bgpvpn_id': 'id'},
}
synthetic_fields = ['bgpvpn']
def __init__(self, *args, **kwargs):
super(BGPVPNPortAssociationRoute, self).__init__(*args, **kwargs)
@classmethod
def modify_fields_from_db(cls, db_obj):
fields = super(BGPVPNPortAssociationRoute,
cls).modify_fields_from_db(db_obj)
if 'prefix' in fields and fields['prefix'] is not None:
fields['prefix'] = net_utils.AuthenticIPNetwork(fields['prefix'])
return fields
@classmethod
def modify_fields_to_db(cls, fields):
result = super(BGPVPNPortAssociationRoute,
cls).modify_fields_to_db(fields)
if 'prefix' in result and result['prefix'] is not None:
result['prefix'] = cls.filter_to_str(result['prefix'])
return result
# we use these objects in set() in bgpvpn agent extension
def __eq__(self, other):
# pylint: disable=no-member
return ((self.type, self.prefix, self.bgpvpn_id) ==
(other.type, other.prefix, other.bgpvpn_id))
def __hash__(self):
# pylint: disable=no-member
return hash((self.type, self.prefix, self.bgpvpn_id))
resources.register_resource_class(BGPVPN)
resources.register_resource_class(BGPVPNNetAssociation)
resources.register_resource_class(BGPVPNRouterAssociation)
resources.register_resource_class(BGPVPNPortAssociation)
| stackforge/networking-bagpipe-l2 | networking_bagpipe/objects/bgpvpn.py | Python | apache-2.0 | 15,705 | 0 |
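# Illustrative note, not part of the module above. BGPVPN keeps its RT/RD lists
# in single comma-joined DB columns; the modify_fields_to_db()/
# modify_fields_from_db() hooks are essentially this round trip:
_fields = {'route_targets': ['64512:1', '64512:2'], 'import_targets': []}
_db_row = {k: ','.join(v) for k, v in _fields.items()}                  # '64512:1,64512:2', ''
_restored = {k: v.split(',') if v else [] for k, v in _db_row.items()}  # back to the lists
print(_db_row, _restored)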
from django.http import HttpResponse, Http404
from django.template.loader import get_template
from django.template import RequestContext
from django.core.paginator import Paginator, EmptyPage
from django.utils.translation import ugettext as _
from tagging.models import Tag
from messages.models import Message
from settings import LANGUAGE_CODE as lang
from qqq.models import Contribution
from qqq.questions.models import Question
from qqq.revisions.models import Revision
from qqq.collections.models import Collection
from qqq.posts.models import Post
import logging
# the number of results to paginate by
RESULTS_PER_PAGE = 25
def home(request):
"""
Serves the home page, which depends on whether the user is logged in or not.
"""
if request.user.is_authenticated():
return participate(request)
else:
c = RequestContext(request)
if lang == "nl":
c['frontpage'] = 'frontpage_nl.html'
else:
c['frontpage'] = 'frontpage_en.html'
t = get_template('home_public.html')
c['tags_list'] = Tag.objects.cloud_for_model(Question, steps=9, min_count=None)
return HttpResponse(t.render(c))
###################################################################################
#################################### MEMBERS ONLY #################################
###################################################################################
def participate(request):
"""
Serves the home page for logged-in users
"""
t = get_template('home_members.html')
c = RequestContext(request)
filter = request.GET.get(_('filter'), False)
# behold some serious django-fu!
if filter == _('questions'):
c['filter'] = 'questions'
questions = Question.objects.all()
objects = Contribution.objects.filter(question__in=questions).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('improvements'):
c['filter'] = 'improvements'
revisions = Revision.objects.all()
objects = Contribution.objects.filter(revision__in=revisions).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('collections'):
c['filter'] = 'collections'
collections = Collection.objects.all()
objects = Contribution.objects.filter(collection__in=collections).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
elif filter == _('posts'):
c['filter'] = 'posts'
posts = Post.objects.all()
objects = Contribution.objects.filter(post__in=posts).select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
else:
objects = Contribution.objects.all().select_related('user', 'question', 'revision', 'collection', 'post', 'tagaction')
p = Paginator(objects, RESULTS_PER_PAGE)
c['type'] = {'all': True}
c['paginator'] = p
try:
c['feed'] = p.page(request.GET.get(_('page'), '1'))
except EmptyPage:
raise Http404
c['message_list'] = Message.objects.inbox_for(request.user)
return HttpResponse(t.render(c))
| rtts/qqq | qqq/views.py | Python | gpl-3.0 | 3,044 | 0.012155 |
# -*- coding: utf-8 -*-
""" Get and post nagios checkresults between nago instances
This extension allows getting status data from a local nagios server,
as well as pushing checkresults into a local nagios server, thereby updating nagios status.
"""
from pynag.Parsers import mk_livestatus, config
import time
import os
import os.path
import tempfile
from nago.core import nago_access
import nago.extensions.settings
@nago_access()
def get():
""" Get all nagios status information from a local nagios instance
"""
livestatus = mk_livestatus()
hosts = livestatus.get_hosts()
services = livestatus.get_services()
result = {}
result['hosts'] = hosts
result['services'] = services
return result
@nago_access()
def post(hosts=None, services=None, check_existance=True, create_services=True, create_hosts=False):
""" Puts a list of hosts into local instance of nagios checkresults
Arguments:
hosts -- list of dicts, like one obtained from get_checkresults
services -- list of dicts, like one obtained from get_checkresults
check_existance -- If True, check (and log) if objects already exist before posting
create_services -- If True, autocreate non-existing services (where the host already exists)
create_hosts -- If True, autocreate non-existing hosts
"""
nagios_config = config()
nagios_config.parse_maincfg()
check_result_path = nagios_config.get_cfg_value("check_result_path")
fd, filename = tempfile.mkstemp(prefix='c', dir=check_result_path)
if not hosts:
hosts = []
if not services:
services = []
if check_existance:
checkresults_overhaul(hosts, services, create_services=create_services, create_hosts=create_hosts)
checkresults = '### Active Check Result File Made by Nago ###\n'
checkresults += 'file_time=%s' % (int(time.time()))
checkresults = ''
for host in hosts:
checkresults += _format_checkresult(**host)
for service in services:
checkresults += _format_checkresult(**service)
os.write(fd, checkresults)
# Cleanup and make sure our file is readable by nagios
os.close(fd)
os.chmod(filename, 0644)
# Create an ok file, so nagios knows it's ok to reap our changes
file('%s.ok' % filename, 'w')
@nago_access()
def send(remote_host=None):
""" Send local nagios data to a remote nago instance """
my_data = get()
if not remote_host:
remote_host = nago.extensions.settings.get('server')
remote_node = nago.core.get_node(remote_host)
remote_node.send_command('checkresults', 'post', **my_data)
return "checkresults sent to %s" % remote_host
def checkresults_overhaul(hosts, services, create_services, create_hosts):
""" Iterates through hosts and services, and filters out those who do not exist in our local monitoring core
If create_services or create_hosts are defined, then
"""
def _format_checkresult(**kwargs):
""" Returns a string in a nagios "checkresults" compatible format """
o = {}
o['check_type'] = '1'
o['check_options'] = '0'
o['scheduled_check'] = '1'
o['reschedule_check'] = '1'
o['latency'] = '0.0'
o['start_time'] = '%5f' % time.time()
o['finish_time'] = '%5f' % time.time()
o['early_timeout'] = '0'
o['exited_ok'] = '1'
o['long_plugin_output'] = ''
o['performance_data'] = ''
o.update(locals())
o.update(kwargs)
del o['kwargs']
del o['o']
template = _host_check_result
# Escape all linebreaks if we have them
for k, v in o.items():
if isinstance(v, basestring) and '\n' in v:
o[k] = v.replace('\n', '\\n')
# Livestatus returns slightly different output than status.dat
# Lets normalize everything to status.dat format
if 'name' in o and not 'host_name' in o:
o['host_name'] = o['name']
if 'state' in o and not 'return_code' in o:
o['return_code'] = o['state']
if 'description' in o and not 'service_description' in o:
o['service_description'] = o['description']
if not o['performance_data'] and 'perf_data' in o:
o['performance_data'] = o['perf_data']
# If this is a service (as opposed to host) lets add service_description field in out putput
if 'service_description' in o:
template += "service_description={service_description}\n"
if not o['performance_data'].endswith('\\n'):
o['performance_data'] += '\\n'
# Format the string and return
return template.format(**o) + '\n'
# This is an example of what checkresult file looks like to nagios. This is used by
# _format_checkresult()
_host_check_result = """
host_name={host_name}
check_type={check_type}
check_options=0
scheduled_check=1
reschedule_check=1
latency=0.0
start_time={start_time}
finish_time={finish_time}
early_timeout=0
exited_ok=1
return_code={return_code}
output={plugin_output}{long_plugin_output} | {performance_data}
"""
| opinkerfi/nago | nago/extensions/checkresults.py | Python | agpl-3.0 | 4,996 | 0.003203 |
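# Illustrative usage sketch, not part of the module above. _format_checkresult()
# only needs the fields the template cannot default (host_name, return_code,
# plugin_output, plus service_description for services); check_type, timestamps,
# exited_ok, etc. are filled in automatically. A hypothetical host result:
print(_format_checkresult(host_name="web01",
                          return_code=0,
                          plugin_output="OK - host responds in 0.8 ms"))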
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from . import common
from . import test_manual
| jeremiahyan/odoo | addons/l10n_ar/tests/__init__.py | Python | gpl-3.0 | 122 | 0 |
"""
https://open.kattis.com/problems/easiest
"""
import sys
def sum_digits(number):
sum_of_digits = 0
while number:
sum_of_digits, number = sum_of_digits + number % 10, number // 10
return sum_of_digits
for line in sys.stdin:
n = int(line)
if n == 0:
break
p = 11
while True:
if sum_digits(n) == sum_digits(n * p):
print(p)
break
p += 1
| cstewart90/kattis-python | easiest/easiest.py | Python | mit | 428 | 0 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
VetEpiGIS-Group
A QGIS plugin
Spatial functions for vet epidemiology
-------------------
begin : 2016-05-06
git sha : $Format:%H$
copyright : (C) 2016 by Norbert Solymosi
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os, shutil
from PyQt5.QtGui import *
from PyQt5.QtWidgets import QDialog, QFileDialog, QApplication
from PyQt5.QtCore import Qt, QSettings, QCoreApplication, QFile, QFileInfo, QDate, QVariant, \
    pyqtSignal, QRegExp, QDateTime, QTranslator, QDir, QIODevice, QTextStream
from qgis.core import QgsDataSourceUri
from PyQt5.QtSql import *
import psycopg2
import psycopg2.extensions
# use unicode!
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
from .export_dialog import Ui_Dialog
class Dialog(QDialog, Ui_Dialog):
def __init__(self):
"""Constructor for the dialog.
"""
QDialog.__init__(self)
self.setupUi(self)
self.plugin_dir = ''
self.settings = ''
# # self.comboBox.currentIndexChanged.connect(self.seltype)
# self.commandLinkButton.clicked.connect(self.createNewSLdb)
# self.toolButton.clicked.connect(self.dbSource)
#
# self.groupBox.clicked.connect(self.seltype)
# self.groupBox_2.clicked.connect(self.seltype)
#
# self.commandLinkButton_2.clicked.connect(self.createPGtables)
# self.lineEdit.setText('/home/sn/dev/QGISplugins/VetEpiGIS/groupdata/c.sqlite')
# def dbSource(self):
# dbpath = QFileDialog.getOpenFileName(self, 'Select file', QDir.currentPath(), 'SpatiaLite file (*.sqlite *.*)')
# if not os.path.isfile(dbpath):
# self.lineEdit.setText(dbpath)
#
#
# def seltype(self):
# if self.groupBox.isChecked():
# self.groupBox_2.setChecked(False)
# self.groupBox.setChecked(True)
#
# if self.groupBox_2.isChecked():
# self.groupBox.setChecked(False)
# self.groupBox_2.setChecked(True)
#
# # self.tabWidget.setCurrentIndex(self.comboBox.currentIndex())
# # if self.comboBox.currentText()=='SpatiaLite':
# # self.tabWidget.setCurrentIndex(0)
# # else:
# # self.tabWidget.setCurrentIndex(1)
#
#
# def createNewSLdb(self):
# fileName = QFileDialog.getSaveFileName(self, caption='Create new SpatiaLite database')
# try:
# QApplication.setOverrideCursor(Qt.WaitCursor)
# file = QFile(fileName + '.sqlite')
# dbpath = QFileInfo(file).absoluteFilePath()
# dbfold = os.path.join(self.plugin_dir, 'db')
# if not os.path.isfile(dbpath):
# shutil.copy(os.path.join(dbfold, 'base.sqlite'), dbpath)
# self.lineEdit.setText(dbpath)
#
# db = QSqlDatabase.addDatabase('QSPATIALITE')
# db.setDatabaseName(dbpath)
# db.open()
# query = db.exec_(
# """
# CREATE TABLE outbreaks_point (
# gid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
# localid text,
# code text,
# largescale text,
# disease text,
# animalno numeric,
# species text,
# production text,
# year numeric,
# status text,
# suspect text,
# confirmation text,
# expiration text,
# notes text,
# hrid text,
# timestamp text,
# grouping text
# );
# """
# )
# query = db.exec_("SELECT AddGeometryColumn('outbreaks_point', 'geom', 4326, 'POINT', 'XY');")
# query = db.exec_(
# """
# CREATE TABLE outbreaks_area (
# gid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
# localid text,
# code text,
# largescale text,
# disease text,
# animalno numeric,
# species text,
# production text,
# year numeric,
# status text,
# suspect text,
# confirmation text,
# expiration text,
# notes text,
# hrid text,
# timestamp text,
# grouping text
# );
# """
# )
# query = db.exec_("SELECT AddGeometryColumn('outbreaks_area', 'geom', 4326, 'MULTIPOLYGON', 'XY');")
# query = db.exec_(
# """
# CREATE TABLE pois (
# gid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
# localid text,
# code text,
# activity text,
# hrid text
# );
# """)
# query = db.exec_("SELECT AddGeometryColumn('pois', 'geom', 4326, 'POINT', 'XY');")
# query = db.exec_(
# """
# CREATE TABLE buffers (
# gid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
# localid text,
# code text,
# largescale text,
# disease text,
# animalno numeric,
# species text,
# production text,
# year numeric,
# status text,
# suspect text,
# confirmation text,
# expiration text,
# notes text,
# hrid text,
# timestamp text
# );
# """)
# query = db.exec_("SELECT AddGeometryColumn('buffers', 'geom', 4326, 'MULTIPOLYGON', 'XY');")
# query = db.exec_(
# """
# CREATE TABLE zones (
# gid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
# localid text,
# code text,
# disease text,
# zonetype text,
# subpopulation text,
# validity_start text,
# validity_end text,
# legal_framework text,
# competent_authority text,
# biosecurity_measures text,
# control_of_vectors text,
# control_of_wildlife_reservoir text,
# modified_stamping_out text,
# movement_restriction text,
# stamping_out text,
# surveillance text,
# vaccination text,
# other_measure text,
# related text,
# hrid text,
# timestamp text
# );
# """)
# query = db.exec_("SELECT AddGeometryColumn('zones', 'geom', 4326, 'MULTIPOLYGON', 'XY');")
# db.close()
#
# QApplication.restoreOverrideCursor()
# except IOError:
# return False
#
#
# def createPGtables(self):
# QApplication.setOverrideCursor(Qt.WaitCursor)
#
# self.settings.beginGroup('PostgreSQL/connections/' + self.comboBox.currentText())
# PGhost = self.settings.value('host', '')
# PGport = self.settings.value('port', '')
# PGdatabase = self.settings.value('database', '')
# PGusername = self.settings.value('username', '')
# PGpassword = self.settings.value('password', '')
# self.settings.endGroup()
#
# try:
# PGcon = psycopg2.connect(host=PGhost, port=PGport, database=PGdatabase, user=PGusername, password=PGpassword)
# except TypeError:
# PGcon = psycopg2.connect(host=PGhost, database=PGdatabase, user=PGusername, password=PGpassword)
#
# cursor = PGcon.cursor()
# sql = """
# DROP TABLE IF EXISTS xdiseases, xpoitypes, xspecies, xstyles, pois, outbreaks_point, outbreaks_area, buffers, zones;
# CREATE TABLE outbreaks_point (
# gid serial NOT NULL,
# localid character varying(254),
# code character varying(254),
# largescale character varying(254),
# disease character varying(254),
# animalno integer,
# species character varying(254),
# production character varying(254),
# year integer,
# status character varying(254),
# suspect character varying(254),
# confirmation character varying(254),
# expiration character varying(254),
# notes character varying(254),
# hrid character varying(254),
# timestamp character varying(254),
# grouping character varying(254),
# geom geometry,
# CONSTRAINT outbreaks_point_pkey PRIMARY KEY (gid),
# CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2),
# CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POINT'::text OR geom IS NULL),
# CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4326)
# );
# CREATE TABLE outbreaks_area (
# gid serial NOT NULL,
# localid character varying(254),
# code character varying(254),
# largescale character varying(254),
# disease character varying(254),
# animalno integer,
# species character varying(254),
# production character varying(254),
# year integer,
# status character varying(254),
# suspect character varying(254),
# confirmation character varying(254),
# expiration character varying(254),
# notes character varying(254),
# hrid character varying(254),
# timestamp character varying(254),
# grouping character varying(254),
# geom geometry,
# CONSTRAINT outbreaks_area_pkey PRIMARY KEY (gid),
# CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2),
# CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'MULTIPOLYGON'::text OR geom IS NULL),
# CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4326)
# );
# CREATE TABLE pois (
# gid serial NOT NULL,
# localid character varying(254),
# code character varying(254),
# activity character varying(254),
# hrid character varying(254),
# geom geometry,
# CONSTRAINT pois_pkey PRIMARY KEY (gid),
# CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2),
# CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'POINT'::text OR geom IS NULL),
# CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4326)
# );
# CREATE TABLE buffers (
# gid serial NOT NULL,
# localid character varying(254),
# code character varying(254),
# largescale character varying(254),
# disease character varying(254),
# animalno integer,
# species character varying(254),
# production character varying(254),
# year integer,
# status character varying(254),
# suspect character varying(254),
# confirmation character varying(254),
# expiration character varying(254),
# notes character varying(254),
# hrid character varying(254),
# timestamp character varying(254),
# geom geometry,
# CONSTRAINT buffers_pkey PRIMARY KEY (gid),
# CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2),
# CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'MULTIPOLYGON'::text OR geom IS NULL),
# CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4326)
# );
# CREATE TABLE zones (
# gid serial NOT NULL,
# localid character varying(254),
# code character varying(254),
# disease character varying(254),
# zonetype character varying(254),
# subpopulation character varying(254),
# validity_start character varying(254),
# validity_end character varying(254),
# legal_framework character varying(254),
# competent_authority character varying(254),
# biosecurity_measures character varying(254),
# control_of_vectors character varying(254),
# control_of_wildlife_reservoir character varying(254),
# modified_stamping_out character varying(254),
# movement_restriction character varying(254),
# stamping_out character varying(254),
# surveillance character varying(254),
# vaccination character varying(254),
# other_measure character varying(254),
# related character varying(254),
# hrid character varying(254),
# timestamp character varying(254),
# geom geometry,
# CONSTRAINT zones_pkey PRIMARY KEY (gid),
# CONSTRAINT enforce_dims_geom CHECK (st_ndims(geom) = 2),
# CONSTRAINT enforce_geotype_geom CHECK (geometrytype(geom) = 'MULTIPOLYGON'::text OR geom IS NULL),
# CONSTRAINT enforce_srid_geom CHECK (st_srid(geom) = 4326)
# );
# CREATE TABLE xdiseases (
# id serial NOT NULL,
# disease character varying(254),
# lang character varying(254),
# enid integer, CONSTRAINT xdiseases_pkey PRIMARY KEY (id)
# );
# CREATE TABLE xpoitypes (
# id serial NOT NULL,
# poitype character varying(254),
# lang character varying(254),
# enid integer, CONSTRAINT xpoitypes_pkey PRIMARY KEY (id)
# );
# CREATE TABLE xspecies (
# id serial NOT NULL,
# species character varying(254),
# lang character varying(254),
# enid integer, CONSTRAINT xspecies_pkey PRIMARY KEY (id)
# );
# CREATE TABLE xstyles (
# id serial NOT NULL,
# ltype character varying(254),
# sld character varying(254), CONSTRAINT xstyles_pkey PRIMARY KEY (id)
# );
# """
# cursor.execute(sql)
#
# PGcon.commit()
#
# # uri = QgsDataSourceURI()
# # uri.setDatabase(os.path.join(os.path.join(self.plugin_dir, 'db'), 'base.sqlite'))
# db = QSqlDatabase.addDatabase('QSPATIALITE')
# # db.setDatabaseName(uri.database())
# db.setDatabaseName(os.path.join(os.path.join(self.plugin_dir, 'db'), 'base.sqlite'))
#
# if not db.open():
# db.open()
#
# sql = ''
# query = db.exec_('select * from xdiseases')
# while query.next():
# sql = sql + """insert into xdiseases (disease, lang) values('%s', '%s');""" % \
# (query.value(1).replace("'", "''"), query.value(2))
# cursor.execute(sql)
#
# sql = ''
# query = db.exec_('select * from xpoitypes')
# while query.next():
# sql = sql + "insert into xpoitypes (poitype, lang) values('%s', '%s');" % \
# (query.value(1), query.value(2))
# cursor.execute(sql)
#
# sql = ''
# query = db.exec_('select * from xspecies')
# while query.next():
# sql = sql + "insert into xspecies (species, lang) values('%s', '%s');" % \
# (query.value(1), query.value(2))
# cursor.execute(sql)
#
# sql = ''
# query = db.exec_('select * from xstyles')
# while query.next():
# sql = sql + "insert into xstyles (ltype, sld) values('%s', '%s');" % \
# (query.value(1), query.value(2))
# cursor.execute(sql)
#
# PGcon.commit()
# db.close()
#
# # result = cursor.fetchone()
#
# # self.lineEdit.setText(sql)
#
# QApplication.restoreOverrideCursor()
| IZSVenezie/VetEpiGIS-Group | plugin/export.py | Python | gpl-3.0 | 17,939 | 0.001672 |
from unexistent_import import * | siddhika1889/Pydev-Editor | tests/pysrc/extendable/recursion_on_non_existent/__init__.py | Python | epl-1.0 | 31 | 0.032258 |
# -*- coding: utf-8 -*-
from resources.lib.gui.gui import cGui
from resources.lib.config import cConfig
from resources.lib import common
import urllib2
import xbmc
import xbmcgui
import string
import logger
import time
import os
import sys
class cDownload:
def __createProcessDialog(self):
oDialog = xbmcgui.DialogProgress()
oDialog.create('Download')
self.__oDialog = oDialog
def __createDownloadFilename(self, sTitle):
#valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
#filename = ''.join(c for c in sTitle if c in valid_chars)
filename = sTitle
filename = filename.replace(' ','_')
return filename
def download(self, url, sTitle, showDialog = True):
sTitle = u'%s' % sTitle.decode('utf-8')
self.__processIsCanceled = False
# extract header
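        # URLs may carry HTTP request headers after a '|' separator, e.g.
        # 'http://host/video.mp4|Referer=http://example.org&User-Agent=Foo'
        # (the URL and header values here are illustrative only)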
try: header = dict([item.split('=') for item in (url.split('|')[1]).split('&')])
except: header = {}
logger.info('Header for download: %s' % (header))
url = url.split('|')[0]
sTitle = self.__createTitle(url, sTitle)
self.__sTitle = self.__createDownloadFilename(sTitle)
if showDialog:
oGui = cGui()
self.__sTitle = oGui.showKeyBoard(self.__sTitle)
if (self.__sTitle != False and len(self.__sTitle) > 0):
sPath = cConfig().getSetting('download-folder')
if sPath == '':
dialog = xbmcgui.Dialog()
sPath = dialog.browse(3, 'Downloadfolder', 'files', '')
if (sPath != ''):
sDownloadPath = xbmc.translatePath(sPath + '%s' % (self.__sTitle, ))
self.__prepareDownload(url, header, sDownloadPath)
elif self.__sTitle != False:
temp_dir = os.path.join(common.addonPath, "TEMP")
if not os.path.isdir(temp_dir):
os.makedirs(os.path.join(temp_dir))
self.__prepareDownload(url, header, os.path.join(temp_dir, sTitle))
def __prepareDownload(self, url, header, sDownloadPath):
try:
logger.info('download file: ' + str(url) + ' to ' + str(sDownloadPath))
self.__createProcessDialog()
request = urllib2.Request(url, headers=header)
self.__download(urllib2.urlopen(request), sDownloadPath)
except Exception as e:
logger.error(e)
self.__oDialog.close()
def __download(self, oUrlHandler, fpath):
headers = oUrlHandler.info()
iTotalSize = -1
if "content-length" in headers:
iTotalSize = int(headers["Content-Length"])
chunk = 4096
if sys.platform.startswith('win'):
f = open(r'%s' % fpath.decode('utf-8'), "wb")
else:
f = open(r'%s' % fpath, "wb")
iCount = 0
self._startTime = time.time()
while 1:
iCount = iCount +1
data = oUrlHandler.read(chunk)
if not data or self.__processIsCanceled == True:
break
f.write(data)
self.__stateCallBackFunction(iCount, chunk, iTotalSize)
def __createTitle(self, sUrl, sTitle):
aTitle = sTitle.rsplit('.')
if (len(aTitle) > 1):
return sTitle
aUrl = sUrl.rsplit('.')
if (len(aUrl) > 1):
sSuffix = aUrl[-1]
sTitle = sTitle + '.' + sSuffix
return sTitle
def __stateCallBackFunction(self, iCount, iBlocksize, iTotalSize):
timedif = time.time() - self._startTime
currentLoaded = float(iCount * iBlocksize)
if timedif > 0.0:
avgSpd = int(currentLoaded/timedif/1024.0)
else:
avgSpd = 5
iPercent = int( currentLoaded*100/ iTotalSize)
self.__oDialog.update(iPercent, self.__sTitle, '%s/%s@%dKB/s' %(self.__formatFileSize(currentLoaded),self.__formatFileSize(iTotalSize),avgSpd))
if (self.__oDialog.iscanceled()):
self.__processIsCanceled = True
self.__oDialog.close()
def __formatFileSize(self, iBytes):
iBytes = int(iBytes)
if (iBytes == 0):
return '%.*f %s' % (2, 0, 'MB')
return '%.*f %s' % (2, iBytes/(1024*1024.0) , 'MB')
| kabooom/plugin.video.xstream | resources/lib/download.py | Python | gpl-3.0 | 4,301 | 0.006278 |
from .main import PGCli
import sql.parse
import sql.connection
import logging
_logger = logging.getLogger(__name__)
def load_ipython_extension(ipython):
"""This is called via the ipython command '%load_ext pgcli.magic'"""
# first, load the sql magic if it isn't already loaded
if not ipython.find_line_magic("sql"):
ipython.run_line_magic("load_ext", "sql")
# register our own magic
ipython.register_magic_function(pgcli_line_magic, "line", "pgcli")
def pgcli_line_magic(line):
_logger.debug("pgcli magic called: %r", line)
parsed = sql.parse.parse(line, {})
# "get" was renamed to "set" in ipython-sql:
# https://github.com/catherinedevlin/ipython-sql/commit/f4283c65aaf68f961e84019e8b939e4a3c501d43
if hasattr(sql.connection.Connection, "get"):
conn = sql.connection.Connection.get(parsed["connection"])
else:
try:
conn = sql.connection.Connection.set(parsed["connection"])
# a new positional argument was added to Connection.set in version 0.4.0 of ipython-sql
except TypeError:
conn = sql.connection.Connection.set(parsed["connection"], False)
try:
# A corresponding pgcli object already exists
pgcli = conn._pgcli
_logger.debug("Reusing existing pgcli")
except AttributeError:
        # I can't figure out how to get the underlying psycopg2 connection
# from the sqlalchemy connection, so just grab the url and make a
# new connection
pgcli = PGCli()
u = conn.session.engine.url
_logger.debug("New pgcli: %r", str(u))
pgcli.connect(u.database, u.host, u.username, u.port, u.password)
conn._pgcli = pgcli
# For convenience, print the connection alias
print(f"Connected: {conn.name}")
try:
pgcli.run_cli()
except SystemExit:
pass
if not pgcli.query_history:
return
q = pgcli.query_history[-1]
if not q.successful:
_logger.debug("Unsuccessful query - ignoring")
return
if q.meta_changed or q.db_changed or q.path_changed:
_logger.debug("Dangerous query detected -- ignoring")
return
ipython = get_ipython()
return ipython.run_cell_magic("sql", line, q.query)
| dbcli/pgcli | pgcli/magic.py | Python | bsd-3-clause | 2,270 | 0.000441 |
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from nova.virt.libvirt import utils
_dmcrypt_suffix = '-dmcrypt'
def volume_name(base):
"""Returns the suffixed dmcrypt volume name.
    The suffix avoids collisions with similarly named device mapper entries
    used for LVM volumes.
"""
return base + _dmcrypt_suffix
def is_encrypted(path):
"""Returns true if the path corresponds to an encrypted disk."""
if path.startswith('/dev/mapper'):
return path.rpartition('/')[2].endswith(_dmcrypt_suffix)
else:
return False
def create_volume(target, device, cipher, key_size, key):
"""Sets up a dmcrypt mapping
:param target: device mapper logical device name
:param device: underlying block device
:param cipher: encryption cipher string digestible by cryptsetup
:param key_size: encryption key size
:param key: encryption key as an array of unsigned bytes
"""
cmd = ('cryptsetup',
'create',
target,
device,
'--cipher=' + cipher,
'--key-size=' + str(key_size),
'--key-file=-')
key = ''.join(map(lambda byte: "%02x" % byte, key))
utils.execute(*cmd, process_input=key, run_as_root=True)
def delete_volume(target):
"""Deletes a dmcrypt mapping
:param target: name of the mapped logical device
"""
utils.execute('cryptsetup', 'remove', target, run_as_root=True)
def list_volumes():
"""Function enumerates encrypted volumes."""
return [dmdev for dmdev in os.listdir('/dev/mapper')
if dmdev.endswith('-dmcrypt')]
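# Illustrative sketch (device path, cipher and key bytes are hypothetical) of
# how these helpers are typically combined when encrypting an ephemeral disk:
#
#   target = volume_name('instance-00000001_disk')   # -> '..._disk-dmcrypt'
#   create_volume(target, '/dev/sdb1', 'aes-xts-plain64', 512, key=[18, 52] * 32)
#   ... use /dev/mapper/<target> as the disk ...
#   delete_volume(target)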
| ChinaMassClouds/copenstack-server | openstack/src/nova-2014.2/nova/virt/ovirt/dmcrypt.py | Python | gpl-2.0 | 2,226 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test autotest_evaluator module."""
from __future__ import print_function
import os
from chromite.cros_bisect import autotest_evaluator
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import osutils
from chromite.lib import partial_mock
from chromite.lib import remote_access
from chromite.lib import remote_access_unittest
from chromite.lib import repo_util_unittest
class RemoteShScpMock(remote_access_unittest.RemoteShMock):
"""In addition to RemoteSh, it mocks ScpToLocal."""
ATTRS = ('RemoteSh', 'ScpToLocal')
def ScpToLocal(self, _, remote, local, **kwargs):
return self._results['ScpToLocal'].LookupResult(
([remote, local],), kwargs=kwargs)
class TestAutotestEvaluator(cros_test_lib.MockTempDirTestCase):
"""Tests AutotestEvaluator class."""
BOARD = 'samus'
TEST_NAME = 'graphics_WebGLAquarium'
METRIC = 'avg_fps_1000_fishes/summary/value'
REPORT_FILE = 'reports.json'
REMOTE_REPORT_FILE = '%s/results/default/%s/results/results-chart.json' % (
autotest_evaluator.AutotestEvaluator.AUTOTEST_BASE, TEST_NAME)
DUT_IP = '192.168.1.1'
DUT = commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH)(DUT_IP)
TEST_TARGET = '%s/tests/%s/control' % (
autotest_evaluator.AutotestEvaluator.AUTOTEST_BASE, TEST_NAME)
AQUARIUM_REPORT_TEMPLATE = """
{"avg_fps_1000_fishes": {
"summary": {
"units": "fps",
"type": "scalar",
"value": %s,
"improvement_direction": "up"
}
}
}"""
BUILD_LABEL = 'base'
AUTOTEST_CLIENT = autotest_evaluator.AutotestEvaluator.AUTOTEST_CLIENT
TEST_THAT_COMMAND = ['test_that', '-b', BOARD, '--fast', '--args',
'local=True', DUT_IP, TEST_NAME]
def setUp(self):
self.PatchObject(cros_build_lib, 'IsInsideChroot', return_value=False)
# Sets up default options and evaluator object.
self.options = cros_test_lib.EasyAttr(
base_dir=self.tempdir, board=self.BOARD, test_name=self.TEST_NAME,
metric=self.METRIC, metric_take_average=False, reuse_eval=True,
chromium_dir=None, cros_dir=None, eval_passing_only=False)
self.evaluator = autotest_evaluator.AutotestEvaluator(self.options)
def PrepareWebglAquariumReports(self, scores):
"""Prepares graphics_WebGLAquarium reports.
    It is a simplified version. All the test cares about is
    "avg_fps_1000_fishes/summary/value". It can produce multiple reports if
more than one score is given.
Args:
scores: List of scores.
Returns:
A list of file names storing in report directory.
"""
result = []
num_reports = len(scores)
for ith, score in enumerate(scores, start=1):
report_file = os.path.join(
self.tempdir, 'reports',
'results-chart.%s.%d-%d.json' % (self.BUILD_LABEL, ith, num_reports))
osutils.WriteFile(report_file, self.AQUARIUM_REPORT_TEMPLATE % score)
result.append(report_file)
return result
def UpdateOptionsAndEvaluator(self, options_to_update):
"""Updates self.options and self.evaluator.
Based on updated self.options, it creates a new AutotestEvaluator instance
and assigns to self.evaluator.
Args:
options_to_update: a dict to update self.options.
"""
self.options.update(options_to_update)
self.evaluator = autotest_evaluator.AutotestEvaluator(self.options)
def testInit(self):
"""Tests that AutotestEvaluator() works as expected."""
base_dir = self.tempdir
self.assertEqual(base_dir, self.evaluator.base_dir)
self.assertEqual(os.path.join(base_dir, 'reports'),
self.evaluator.report_base_dir)
self.assertTrue(os.path.isdir(self.evaluator.report_base_dir))
self.assertEqual(self.BOARD, self.evaluator.board)
self.assertEqual(self.TEST_NAME, self.evaluator.test_name)
self.assertEqual(self.METRIC, self.evaluator.metric)
self.assertFalse(self.evaluator.metric_take_average)
self.assertTrue(self.evaluator.reuse_eval)
self.assertEqual(os.path.join(base_dir, 'chromium'),
self.evaluator.chromium_dir)
# With chromium_dir specified and flip booleans.
self.UpdateOptionsAndEvaluator(
dict(chromium_dir='/tmp/chromium', reuse_eval=False))
self.assertFalse(self.evaluator.metric_take_average)
self.assertFalse(self.evaluator.reuse_eval)
self.assertEqual('/tmp/chromium', self.evaluator.chromium_dir)
def testInitMissingRequiredArgs(self):
"""Tests that AE() raises exception when required options are missing."""
options = cros_test_lib.EasyAttr()
with self.assertRaises(Exception) as cm:
autotest_evaluator.AutotestEvaluator(options)
exception_message = str(cm.exception)
self.assertIn('Missing command line', exception_message)
self.assertIn('AutotestEvaluator', exception_message)
for arg in autotest_evaluator.AutotestEvaluator.REQUIRED_ARGS:
self.assertIn(arg, exception_message)
def testRunTestFromDut(self):
"""Tests that RunTestFromDut() invokes expected commands."""
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, self.REPORT_FILE], returncode=0,
mock_attr='ScpToLocal')
self.assertTrue(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutSanityCheckFail(self):
"""Tests RunTestFromDut() when autotest control file is missing."""
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=1)
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutLsSshError(self):
"""Tests RunTestFromDut() when autotest control file is missing."""
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET],
returncode=remote_access.SSH_ERROR_CODE)
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutAutotestSshErrorWithEvalPassingOnly(self):
"""Tests RunTestFromDut() with failed autotest and --eval-passing-only."""
self.UpdateOptionsAndEvaluator(dict(eval_passing_only=True))
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET],
returncode=remote_access.SSH_ERROR_CODE)
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutAutotestFailWithEvalPassingOnly(self):
"""Tests RunTestFromDut() with failed autotest and --eval-passing-only."""
self.UpdateOptionsAndEvaluator(dict(eval_passing_only=True))
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=1)
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutAutotestFailWithFailsafe(self):
"""Tests RunTestFromDut() with failed autotest.
    Even if the autotest fails to run, RunTestFromDut() still tries to retrieve
    the report from the DUT.
"""
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=1)
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, self.REPORT_FILE], returncode=0,
mock_attr='ScpToLocal')
self.assertTrue(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutScpReportFail(self):
"""Tests RunTestFromDut() when it failed to remote copy report file."""
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, self.REPORT_FILE], returncode=1,
mock_attr='ScpToLocal')
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def testRunTestFromDutAutotestFailWithFailsafeScpReportFail(self):
"""Tests RunTestFromDut() with autotest failed with --eval-failsafe.
Even if the autotest fails to run, with --eval-failsafe set,
RunTestFromDut() tries to retrieve report from DUT. This test checks
report missing case.
"""
self.UpdateOptionsAndEvaluator(dict(eval_failsafe=True))
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=1)
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, self.REPORT_FILE], returncode=1,
mock_attr='ScpToLocal')
self.assertFalse(self.evaluator.RunTestFromDut(self.DUT, self.REPORT_FILE))
def GetTestResultPath(self, evaluator):
"""Returns base path storing test result.
Args:
evaluator: Evaluator object.
Returns:
      Path where the evaluator stores test results.
"""
return evaluator.ResolvePathFromChroot(os.path.join(
'/tmp', 'test_that_latest', 'results-1-%s' % evaluator.test_name))
def testLookupReportFile(self):
"""Tests LookupReportFile().
Tests that it invokes expected command and performs path normalization.
"""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
results_base_path = self.GetTestResultPath(self.evaluator)
find_command_result = (
'./%s/results/results-chart.json\n' % self.TEST_NAME)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'],
kwargs={'cwd': results_base_path, 'capture_output': True},
output=find_command_result)
self.assertEqual(
os.path.join(results_base_path, self.TEST_NAME, 'results',
'results-chart.json'),
self.evaluator.LookupReportFile())
def testLookupReportFileMissing(self):
"""Tests LookupReportFile() when the report does not exist."""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
results_base_path = self.GetTestResultPath(self.evaluator)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'],
kwargs={'cwd': results_base_path, 'capture_output': True},
output='')
self.assertIsNone(self.evaluator.LookupReportFile())
def WriteTestResult(self, evaluator, score=0):
"""Writes a test result to evaluator's default location.
Args:
evaluator: Evaluator object.
score: score of the result.
Returns:
(path to test result file, result file's content)
"""
result_dir = self.GetTestResultPath(evaluator)
osutils.SafeMakedirs(result_dir)
result_path = os.path.join(result_dir, evaluator.RESULT_FILENAME)
result_content = self.AQUARIUM_REPORT_TEMPLATE % score
osutils.WriteFile(result_path, result_content)
return (result_path, result_content)
def testRunTestFromHost(self):
"""Tests TestFromHost().
Tests that it invokes expected commands and report file being copied to
designated path.
"""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
self.SkipMaySetupBoard()
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=0)
report_path, report_content = self.WriteTestResult(self.evaluator)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'],
output=report_path)
# Make sure report file is copied to designated path.
target_report_file = os.path.join(self.tempdir, 'stored-results-chart.json')
osutils.SafeUnlink(target_report_file)
self.assertTrue(
self.evaluator.RunTestFromHost(self.DUT, target_report_file))
self.assertExists(target_report_file)
self.assertEqual(report_content, osutils.ReadFile(target_report_file))
def testRunTestFromHostTestThatFailWithEvalPassingOnly(self):
"""Tests TestFromHost() with failed autotest and --eval-passing-only."""
self.UpdateOptionsAndEvaluator(dict(eval_passing_only=True))
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
self.SkipMaySetupBoard()
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=1)
self.assertFalse(self.evaluator.RunTestFromHost(self.DUT, self.REPORT_FILE))
def testRunTestFromHostTestThatFail(self):
"""Tests TestFromHost() with failed autotest.
It will try evaluating test result.
"""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
self.SkipMaySetupBoard()
# test_that failed.
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=1)
# However, report is obtained successfully.
report_path, report_content = self.WriteTestResult(self.evaluator)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'],
output=report_path)
# Make sure report file is copied to designated path.
target_report_file = os.path.join(self.tempdir, 'stored-results-chart.json')
osutils.SafeUnlink(target_report_file)
self.assertTrue(
self.evaluator.RunTestFromHost(self.DUT, target_report_file))
self.assertExists(target_report_file)
self.assertEqual(report_content, osutils.ReadFile(target_report_file))
def testRunTestFromHostTestThatFailReportMissing(self):
"""Tests TestFromHost() with failed autotest and without report."""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
self.SkipMaySetupBoard()
# test_that failed.
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=1)
# And report file is missing.
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'], output='')
self.assertFalse(self.evaluator.RunTestFromHost(self.DUT, self.REPORT_FILE))
def testRunTestFromHostReportFileMissing(self):
"""Tests TestFromHost() when test report file does not exist."""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
self.SkipMaySetupBoard()
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=0)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'], output='')
self.assertFalse(self.evaluator.RunTestFromHost(self.DUT, self.REPORT_FILE))
def testGetAutotestMetricValue(self):
"""Tests that GetAutotestMetricValue() extracts score correctly."""
score = 56.73
report_file = self.PrepareWebglAquariumReports([score])[0]
self.assertEqual(score,
self.evaluator.GetAutotestMetricValue(report_file))
def testGetAutotestMetricValueMetricTakeAverage(self):
"""Tests that GetAutotestMetricValue() extracts averaged scores."""
# metric_take_average=True
self.UpdateOptionsAndEvaluator(dict(metric_take_average=True))
scores = [55, 57, 58]
# A report's value is a list of scores.
report_file = self.PrepareWebglAquariumReports([scores])[0]
self.assertAlmostEqual(56.66,
self.evaluator.GetAutotestMetricValue(report_file),
delta=0.01)
def testEvaluateRunTestFromDut(self):
"""Tests Evaluate() which runs test from DUT."""
# Mock RunTestFromDut success.
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
# Prepare result for evaluate.
score = 56.73
report_file = self.PrepareWebglAquariumReports([score])[0]
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, report_file], returncode=0,
kwargs={'check': False}, mock_attr='ScpToLocal')
eval_score = self.evaluator.Evaluate(self.DUT, self.BUILD_LABEL)
self.assertEqual(1, len(eval_score.values))
self.assertEqual(score, eval_score.values[0])
self.assertEqual(score, eval_score.mean)
self.assertEqual(0.0, eval_score.variance)
self.assertEqual(0.0, eval_score.std)
def testEvaluateTwiceRunTestFromDut(self):
"""Tests Evaluate() with repeat=2 which runs test from DUT."""
# Mock RunTestFromDut success.
rsh_mock = self.StartPatcher(RemoteShScpMock())
rsh_mock.AddCmdResult(
['rm', '-f', self.REMOTE_REPORT_FILE], returncode=0)
rsh_mock.AddCmdResult(
['ls', self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
rsh_mock.AddCmdResult(
[self.AUTOTEST_CLIENT, self.TEST_TARGET], returncode=0)
# Prepare two results for evaluate.
scores = [56, 58]
report_files = self.PrepareWebglAquariumReports(scores)
for report_file in report_files:
rsh_mock.AddCmdResult(
[self.REMOTE_REPORT_FILE, report_file], returncode=0,
mock_attr='ScpToLocal')
eval_score = self.evaluator.Evaluate(self.DUT, self.BUILD_LABEL, repeat=2)
self.assertEqual(2, len(eval_score.values))
self.assertEqual(scores[0], eval_score.values[0])
self.assertEqual(scores[1], eval_score.values[1])
self.assertEqual(57, eval_score.mean)
self.assertEqual(2.0, eval_score.variance)
self.assertAlmostEqual(1.414, eval_score.std, delta=0.01)
def SkipMaySetupBoard(self):
"""Let evaluator.MaySetupBoard() returns True without action.
It touches /build/{board} directory inside chroot so that MaySetupBoard()
thinks the board is already set up.
"""
osutils.SafeMakedirs(os.path.join(
self.evaluator.cros_dir, 'chroot', 'build', self.evaluator.board))
def testEvaluateFromHost(self):
"""Tests Evaluate() which runs test from host."""
# Mock RunTestFromDut fail.
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
command_mock.AddCmdResult(
partial_mock.InOrder(['rm', '-f', self.REMOTE_REPORT_FILE]),
returncode=0)
command_mock.AddCmdResult(
partial_mock.InOrder([self.AUTOTEST_CLIENT, self.TEST_TARGET]),
returncode=1)
self.SkipMaySetupBoard()
# Mock RunTestFromHost success.
command_mock.AddCmdResult(self.TEST_THAT_COMMAND, returncode=0)
# Mock 'find' and returns a result file for verify.
score = 59.9
report_file_in_chroot, _ = self.WriteTestResult(self.evaluator, score)
command_mock.AddCmdResult(
['find', '.', '-name', 'results-chart.json'],
output=report_file_in_chroot)
eval_score = self.evaluator.Evaluate(self.DUT, self.BUILD_LABEL)
self.assertEqual(1, len(eval_score.values))
self.assertEqual(score, eval_score.values[0])
self.assertEqual(score, eval_score.mean)
self.assertEqual(0.0, eval_score.variance)
self.assertEqual(0.0, eval_score.std)
def testCheckLastEvaluate(self):
"""Tests CheckLastEvaluate().
    Tests that it extracts scores from the last evaluation result.
"""
scores = [56, 58]
self.PrepareWebglAquariumReports(scores)
eval_score = self.evaluator.CheckLastEvaluate(self.BUILD_LABEL, repeat=2)
self.assertEqual(2, len(eval_score.values))
self.assertEqual(scores[0], eval_score.values[0])
self.assertEqual(scores[1], eval_score.values[1])
self.assertEqual(57, eval_score.mean)
self.assertEqual(2.0, eval_score.variance)
self.assertAlmostEqual(1.414, eval_score.std, delta=0.01)
def testCheckLastEvaluateDifferentLabel(self):
"""Tests that CheckLastEvaluate() failed to extracts score."""
scores = [56, 58]
self.PrepareWebglAquariumReports(scores)
eval_score = self.evaluator.CheckLastEvaluate('different_build', repeat=2)
self.assertEqual(0, len(eval_score))
def testCheckLastEvaluateFlagUnset(self):
"""Tests CheckLastEvaluate() when "reuse_eval" option is unset.
Tests that it always returns empty score when "reuse_eval" option is unset.
"""
# 'reuse_eval' set to False.
self.UpdateOptionsAndEvaluator(dict(reuse_eval=False))
scores = [56, 58]
self.PrepareWebglAquariumReports(scores)
eval_score = self.evaluator.CheckLastEvaluate(self.BUILD_LABEL, repeat=2)
self.assertEqual(0, len(eval_score))
def CreateCommandMockForRepo(self, cwd):
"""Creates a command mock and add commands "repo init" "repo sync".
Args:
cwd: Directory for running "repo init".
Returns:
command_mock object.
"""
command_mock = self.StartPatcher(cros_test_lib.RunCommandMock())
command_mock.AddCmdResult(
['repo', 'init', '--manifest-url',
'https://chromium.googlesource.com/chromiumos/manifest.git',
'--repo-url',
'https://chromium.googlesource.com/external/repo.git'],
kwargs={'cwd': cwd},
side_effect=repo_util_unittest.RepoInitSideEffects)
command_mock.AddCmdResult(
[repo_util_unittest.RepoCmdPath(cwd), 'sync', '--jobs', '8'],
kwargs={'cwd': cwd})
return command_mock
def testSetupCrosRepo(self):
"""Tests SetupCrosRepo() by verifying commands it emits."""
unused_command_mock = self.CreateCommandMockForRepo(self.evaluator.cros_dir)
self.evaluator.SetupCrosRepo()
def testMaySetupBoardAlreadyDone(self):
"""Tests MaySetupBoard() that board is already set."""
# mkdir board path inside chroot.
self.SkipMaySetupBoard()
self.assertTrue(self.evaluator.MaySetupBoard())
def testMaySetupBoard(self):
"""Tests MaySetupBoard()."""
command_mock = self.CreateCommandMockForRepo(self.evaluator.cros_dir)
kwargs_run_chroot = {
'enter_chroot': True,
'chroot_args': ['--chrome_root', self.evaluator.chromium_dir,
'--no-ns-pid'],
'cwd': self.evaluator.cros_dir}
command_mock.AddCmdResult(
['setup_board', '--board', self.BOARD], kwargs=kwargs_run_chroot)
command_mock.AddCmdResult(
['./build_packages', '--board', self.BOARD], kwargs=kwargs_run_chroot)
self.assertTrue(self.evaluator.MaySetupBoard())
def testMaySetupBoardBuildPackageFailed(self):
"""Tests MaySetupBoard()."""
command_mock = self.CreateCommandMockForRepo(self.evaluator.cros_dir)
kwargs_run_chroot = {
'enter_chroot': True,
'chroot_args': ['--chrome_root', self.evaluator.chromium_dir,
'--no-ns-pid'],
'cwd': self.evaluator.cros_dir}
command_mock.AddCmdResult(
['setup_board', '--board', self.BOARD], kwargs=kwargs_run_chroot)
command_mock.AddCmdResult(
['./build_packages', '--board', self.BOARD], kwargs=kwargs_run_chroot,
returncode=1)
self.assertFalse(self.evaluator.MaySetupBoard())
| endlessm/chromium-browser | third_party/chromite/cros_bisect/autotest_evaluator_unittest.py | Python | bsd-3-clause | 24,070 | 0.0027 |
record_lookupAll_genericLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_lookupAll_genericLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_lookupAll_genericLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_lookupAll_fieldSpecificLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_lookupAll_fieldSpecificLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_lookupAll_fieldSpecificLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_lookupAll_normLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_lookupAll_normLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_lookupAll_normLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_regexAll_genericRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_regexAll_genericRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_regexAll_genericRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_regexAll_fieldSpecificRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_regexAll_fieldSpecificRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_regexAll_fieldSpecificRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_regexAll_normRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
record_regexAll_normRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
record_regexAll_normRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
record_normIncludes_included_caught = [{"emailAddress": "[email protected]", "field1": "findgoodinvaluejunk"}]
record_normIncludes_included_uncaught = [{"emailAddress": "[email protected]", "field1": "nothere"}]
record_normIncludes_excluded_caught = [{"emailAddress": "[email protected]", "field1": "findgoodinvaluejunk butstuffnobad"}]
record_normIncludes_excluded_uncaught = [{"emailAddress": "[email protected]", "field1": "findgoodinvaluejunk uncaught"}]
record_normIncludes_begins_caught = [{"emailAddress": "[email protected]", "field1": "abcdefg"}]
record_normIncludes_begins_uncaught = [{"emailAddress": "[email protected]", "field1": "hijklmnop"}]
record_normIncludes_ends_caught = [{"emailAddress": "[email protected]", "field1": "qrstuvwxyz"}]
record_normIncludes_ends_uncaught = [{"emailAddress": "[email protected]", "field1": "notalpha"}]
record_normIncludes_notChecked = [{"emailAddress": "[email protected]", "field2": "doesnotmatter"}]
record_deriveAll_deriveValue_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findthis"}]
record_derive_sort = [{"emailAddress": "[email protected]", "field1": "", "field3": "findthis", "field4": "nofindthis"}]
record_deriveAll_deriveValue_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findthis"}]
record_deriveAll_deriveValue_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "youwillnotfindthis"}]
record_deriveAll_deriveValue_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "dontfindthis"}]
record_deriveAll_deriveValue_notChecked = [{"emailAddress": "[email protected]", "field3": "", "field2": "findthis"}]
record_deriveAll_copyValue = [{"emailAddress": "[email protected]", "field1": "", "field2": "newvalue"}]
record_deriveAll_deriveRegex_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findthis"}]
record_deriveAll_deriveRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "dontfindthis"}]
record_deriveAll_deriveRegex_notChecked = [{"emailAddress": "[email protected]", "field3": "", "field2": "findthis"}]
record_deriveAll_deriveRegex_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findthis"}]
record_deriveAll_deriveRegex_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "youwillnotfindthis"}]
record_deriveAll_deriveIncludes_included_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk"}]
record_deriveAll_deriveIncludes_included_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "nothere"}]
record_deriveAll_deriveIncludes_excluded_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk butstuffnobad"}]
record_deriveAll_deriveIncludes_excluded_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk uncaught"}]
record_deriveAll_deriveIncludes_begins_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "abcdefg"}]
record_deriveAll_deriveIncludes_begins_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "hijklmnop"}]
record_deriveAll_deriveIncludes_ends_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "qrstuvwxyz"}]
record_deriveAll_deriveIncludes_ends_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "notalpha"}]
record_deriveAll_deriveIncludes_notChecked = [{"emailAddress": "[email protected]", "field2": "", "field3": "doesnotmatter"}]
record_deriveAll_deriveIncludes_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findgoodinvaluejunk"}]
record_deriveAll_deriveIncludes_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "nothere"}]
#
history_genericLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_genericLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_genericLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_fieldSpecificLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_fieldSpecificLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_fieldSpecificLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_normLookup_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_normLookup_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_normLookup_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_genericRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_genericRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_genericRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_fieldSpecificRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_fieldSpecificRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_fieldSpecificRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_normRegex_caught = [{"emailAddress": "[email protected]", "field1": "badvalue"}]
history_normRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "badvalue-uncaught"}]
history_normRegex_notChecked = [{"emailAddress": "[email protected]", "field2": "badvalue"}]
history_normIncludes_included_caught = [{"emailAddress": "[email protected]", "field1": "findgoodinvaluejunk"}]
history_normIncludes_included_uncaught = [{"emailAddress": "[email protected]", "field1": "nothere"}]
history_normIncludes_notChecked = [{"emailAddress": "[email protected]", "field2": "doesnotmatter"}]
history_deriveValue_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findthis"}]
history_deriveValue_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findthis"}]
history_deriveValue_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "youwillnotfindthis"}]
history_deriveValue_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "dontfindthis"}]
history_deriveValue_notChecked = [{"emailAddress": "[email protected]", "field3": "", "field2": "findthis"}]
history_copyValue = [{"emailAddress": "[email protected]", "field1": "", "field2": "newvalue"}]
history_deriveRegex_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findthis"}]
history_deriveRegex_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "dontfindthis"}]
history_deriveRegex_notChecked = [{"emailAddress": "[email protected]", "field3": "", "field2": "findthis"}]
history_deriveRegex_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findthis"}]
history_deriveRegex_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "youwillnotfindthis"}]
history_deriveIncludes_included_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk"}]
history_deriveIncludes_included_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "nothere"}]
history_deriveIncludes_excluded_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk butstuffnobad"}]
history_deriveIncludes_excluded_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk uncaught"}]
history_deriveIncludes_begins_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "abcdefg"}]
history_deriveIncludes_begins_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "hijklmnop"}]
history_deriveIncludes_ends_caught = [{"emailAddress": "[email protected]", "field1": "", "field2": "qrstuvwxyz"}]
history_deriveIncludes_ends_uncaught = [{"emailAddress": "[email protected]", "field1": "", "field2": "notalpha"}]
history_deriveIncludes_notChecked = [{"emailAddress": "[email protected]", "field2": "", "field3": "doesnotmatter"}]
history_deriveIncludes_overwriteFalse = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "findgoodinvaluejunk"}]
history_deriveIncludes_blankIfNoMatch = [{"emailAddress": "[email protected]", "field1": "oldvalue", "field2": "nothere"}]
history_deriveIncludes_deriveCheckMatch = [{"emailAddress": "[email protected]", "field1": "", "field2": "findgoodinvaluejunk", "field3":"notright"}]
record_returnHistoryId_False = [{"emailAddress": "[email protected]"}]
record_writeContactHistory_False = [{"emailAddress": "[email protected]"}]
record_writeContactHistory_writeConfig = [{"emailAddress": "[email protected]"}]
record_writeContactHistory_historyCurrent1 = [{"emailAddress": "[email protected]"}]
record_writeContactHistory_historyCurrent2 = [{"emailAddress": "[email protected]"}]
record_configDoesNotExist = [{"emailAddress": "[email protected]"}]
record_udf_beforeGenericValidation = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeGenericRegex = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeFieldSpecificValidation = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeFieldSpecificRegex = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeNormalization = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeNormalizationRegex = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeNormalizationIncludes = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_beforeDeriveData = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_afterProcessing = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_sort = [{"emailAddress": "[email protected]", "field1": ""}]
record_udf_afterProcessing_invalidFcn = [{"emailAddress": "[email protected]", "field1": ""}]
record_dwmAll_noConfig = [{"emailAddress": "[email protected]"}]
| rh-marketingops/dwm | dwm/test/test_records.py | Python | gpl-3.0 | 11,832 | 0.008874 |
# Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ['async', 'async_limited']
import threading
import functools
enabled = True
def async(func):
@functools.wraps(func)
def async_wrapper(*args, **kargs):
t = threading.Thread(target=func, args=args, kwargs=kargs)
t.start()
return t
if enabled:
return async_wrapper
else:
return func
def async_limited(count):
def limited_func(func):
semaphore = threading.BoundedSemaphore(count)
# closure with func and semaphore
def thread_wrap(*args, **kargs):
result = func(*args, **kargs)
semaphore.release()
return result
# closure with thread_wrap and semaphore
@functools.wraps(func)
def limited_wrap(*args, **kargs):
semaphore.acquire()
t = threading.Thread(target=thread_wrap, args=args, kwargs=kargs)
t.start()
return t
if enabled:
return limited_wrap
else:
return func
return limited_func
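# Minimal usage sketch (function names are illustrative):
#
#   @async
#   def fetch(url):
#       ...
#
#   @async_limited(4)   # at most 4 such calls run concurrently
#   def crunch(item):
#       ...
#
# Both wrappers return the started threading.Thread, so callers may join() it.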
| ncbray/pystream | bin/util/application/async.py | Python | apache-2.0 | 1,436 | 0.021588 |
import pytest
from tests.support.asserts import assert_success
"""
Tests that WebDriver can transcend site origins.
Many modern browsers impose strict cross-origin checks,
and WebDriver should be able to transcend these.
Although an implementation detail, certain browsers
also enforce process isolation based on site origin.
This is known to sometimes cause problems for WebDriver implementations.
"""
@pytest.fixture
def frame_doc(inline):
return inline("<title>cheese</title><p>frame")
@pytest.fixture
def one_frame_doc(inline, frame_doc):
return inline("<title>bar</title><iframe src='%s'></iframe>" % frame_doc)
@pytest.fixture
def nested_frames_doc(inline, one_frame_doc):
return inline("<title>foo</title><iframe src='%s'></iframe>" % one_frame_doc)
def get_title(session):
return session.transport.send(
"GET", "session/{session_id}/title".format(**vars(session)))
def test_no_iframe(session, inline):
session.url = inline("<title>Foobar</title><h2>Hello</h2>")
result = get_title(session)
assert_success(result, "Foobar")
def test_iframe(session, one_frame_doc):
session.url = one_frame_doc
frame = session.find.css("iframe", all=False)
session.switch_frame(frame)
session.find.css("p", all=False)
response = get_title(session)
assert_success(response, "bar")
def test_nested_iframe(session, nested_frames_doc):
session.url = nested_frames_doc
outer_frame = session.find.css("iframe", all=False)
session.switch_frame(outer_frame)
inner_frame = session.find.css("iframe", all=False)
session.switch_frame(inner_frame)
session.find.css("p", all=False)
response = get_title(session)
assert_success(response, "foo")
@pytest.mark.parametrize("domain", ["", "alt"], ids=["same_origin", "cross_origin"])
def test_origin(session, inline, iframe, domain):
session.url = inline("<title>foo</title>{}".format(
iframe("<title>bar</title><p>frame", domain=domain)))
frame = session.find.css("iframe", all=False)
session.switch_frame(frame)
session.find.css("p", all=False)
response = get_title(session)
assert_success(response, "foo")
| scheib/chromium | third_party/blink/web_tests/external/wpt/webdriver/tests/get_title/iframe.py | Python | bsd-3-clause | 2,183 | 0.000916 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the test_stream module."""
# pytype: skip-file
import unittest
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.pipeline_options import TypeOptions
from apache_beam.portability import common_urns
from apache_beam.portability.api.beam_interactive_api_pb2 import TestStreamFileHeader
from apache_beam.portability.api.beam_interactive_api_pb2 import TestStreamFileRecord
from apache_beam.portability.api.beam_runner_api_pb2 import TestStreamPayload
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.test_stream import ElementEvent
from apache_beam.testing.test_stream import OutputFormat
from apache_beam.testing.test_stream import ProcessingTimeEvent
from apache_beam.testing.test_stream import ReverseTestStream
from apache_beam.testing.test_stream import TestStream
from apache_beam.testing.test_stream import WatermarkEvent
from apache_beam.testing.test_stream import WindowedValueHolder
from apache_beam.testing.test_stream_service import TestStreamServiceController
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.testing.util import equal_to_per_window
from apache_beam.transforms import trigger
from apache_beam.transforms import window
from apache_beam.transforms.window import FixedWindows
from apache_beam.transforms.window import TimestampedValue
from apache_beam.utils import timestamp
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.windowed_value import PaneInfo
from apache_beam.utils.windowed_value import PaneInfoTiming
from apache_beam.utils.windowed_value import WindowedValue
class TestStreamTest(unittest.TestCase):
def test_basic_test_stream(self):
test_stream = (TestStream()
.advance_watermark_to(0)
.add_elements([
'a',
WindowedValue('b', 3, []),
TimestampedValue('c', 6)])
.advance_processing_time(10)
.advance_watermark_to(8)
.add_elements(['d'])
.advance_watermark_to_infinity()) # yapf: disable
self.assertEqual(
test_stream._events,
[
WatermarkEvent(0),
ElementEvent([
TimestampedValue('a', 0),
TimestampedValue('b', 3),
TimestampedValue('c', 6),
]),
ProcessingTimeEvent(10),
WatermarkEvent(8),
ElementEvent([
TimestampedValue('d', 8),
]),
WatermarkEvent(timestamp.MAX_TIMESTAMP),
])
def test_test_stream_errors(self):
with self.assertRaises(
AssertionError, msg=('Watermark must strictly-monotonically advance.')):
_ = (TestStream().advance_watermark_to(5).advance_watermark_to(4))
with self.assertRaises(
AssertionError,
msg=('Must advance processing time by positive amount.')):
_ = (TestStream().advance_processing_time(-1))
with self.assertRaises(
AssertionError,
msg=('Element timestamp must be before timestamp.MAX_TIMESTAMP.')):
_ = (
TestStream().add_elements(
[TimestampedValue('a', timestamp.MAX_TIMESTAMP)]))
def test_basic_execution(self):
test_stream = (TestStream()
.advance_watermark_to(10)
.add_elements(['a', 'b', 'c'])
.advance_watermark_to(20)
.add_elements(['d'])
.add_elements(['e'])
.advance_processing_time(10)
.advance_watermark_to(300)
.add_elements([TimestampedValue('late', 12)])
.add_elements([TimestampedValue('last', 310)])
.advance_watermark_to_infinity()) # yapf: disable
class RecordFn(beam.DoFn):
def process(
self,
element=beam.DoFn.ElementParam,
timestamp=beam.DoFn.TimestampParam):
yield (element, timestamp)
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
with TestPipeline(options=options) as p:
my_record_fn = RecordFn()
records = p | test_stream | beam.ParDo(my_record_fn)
assert_that(
records,
equal_to([
('a', timestamp.Timestamp(10)),
('b', timestamp.Timestamp(10)),
('c', timestamp.Timestamp(10)),
('d', timestamp.Timestamp(20)),
('e', timestamp.Timestamp(20)),
('late', timestamp.Timestamp(12)),
('last', timestamp.Timestamp(310)),
]))
def test_multiple_outputs(self):
"""Tests that the TestStream supports emitting to multiple PCollections."""
letters_elements = [
TimestampedValue('a', 6),
TimestampedValue('b', 7),
TimestampedValue('c', 8),
]
numbers_elements = [
TimestampedValue('1', 11),
TimestampedValue('2', 12),
TimestampedValue('3', 13),
]
test_stream = (TestStream()
.advance_watermark_to(5, tag='letters')
.add_elements(letters_elements, tag='letters')
.advance_watermark_to(10, tag='numbers')
.add_elements(numbers_elements, tag='numbers')) # yapf: disable
class RecordFn(beam.DoFn):
def process(
self,
element=beam.DoFn.ElementParam,
timestamp=beam.DoFn.TimestampParam):
yield (element, timestamp)
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
main = p | test_stream
letters = main['letters'] | 'record letters' >> beam.ParDo(RecordFn())
numbers = main['numbers'] | 'record numbers' >> beam.ParDo(RecordFn())
assert_that(
letters,
equal_to([('a', Timestamp(6)), ('b', Timestamp(7)),
('c', Timestamp(8))]),
label='assert letters')
assert_that(
numbers,
equal_to([('1', Timestamp(11)), ('2', Timestamp(12)),
('3', Timestamp(13))]),
label='assert numbers')
p.run()
def test_multiple_outputs_with_watermark_advancement(self):
"""Tests that the TestStream can independently control output watermarks."""
# Purposely set the watermark of numbers to 20 then letters to 5 to test
# that the watermark advancement is per PCollection.
#
# This creates two PCollections, (a, b, c) and (1, 2, 3). These will be
# emitted at different times so that they will have different windows. The
# watermark advancement is checked by checking their windows. If the
# watermark does not advance, then the windows will be [-inf, -inf). If the
# windows do not advance separately, then the PCollections will both
# windowed in [15, 30).
letters_elements = [
TimestampedValue('a', 6),
TimestampedValue('b', 7),
TimestampedValue('c', 8),
]
numbers_elements = [
TimestampedValue('1', 21),
TimestampedValue('2', 22),
TimestampedValue('3', 23),
]
test_stream = (TestStream()
.advance_watermark_to(0, tag='letters')
.advance_watermark_to(0, tag='numbers')
.advance_watermark_to(20, tag='numbers')
.advance_watermark_to(5, tag='letters')
.add_elements(letters_elements, tag='letters')
.advance_watermark_to(10, tag='letters')
.add_elements(numbers_elements, tag='numbers')
.advance_watermark_to(30, tag='numbers')) # yapf: disable
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
main = p | test_stream
# Use an AfterWatermark trigger with an early firing to test that the
# watermark is advancing properly and that the element is being emitted in
# the correct window.
letters = (
main['letters']
| 'letter windows' >> beam.WindowInto(
FixedWindows(15),
trigger=trigger.AfterWatermark(early=trigger.AfterCount(1)),
accumulation_mode=trigger.AccumulationMode.DISCARDING)
| 'letter with key' >> beam.Map(lambda x: ('k', x))
| 'letter gbk' >> beam.GroupByKey())
numbers = (
main['numbers']
| 'number windows' >> beam.WindowInto(
FixedWindows(15),
trigger=trigger.AfterWatermark(early=trigger.AfterCount(1)),
accumulation_mode=trigger.AccumulationMode.DISCARDING)
| 'number with key' >> beam.Map(lambda x: ('k', x))
| 'number gbk' >> beam.GroupByKey())
# The letters were emitted when the watermark was at 5, thus we expect to
# see the elements in the [0, 15) window. We used an early trigger to make
# sure that the ON_TIME empty pane was also emitted with a TestStream.
# This pane has no data because of the early trigger causes the elements to
# fire before the end of the window and because the accumulation mode
# discards any data after the trigger fired.
expected_letters = {
window.IntervalWindow(0, 15): [
('k', ['a', 'b', 'c']),
('k', []),
],
}
# Same here, except the numbers were emitted at watermark = 20, thus they
# are in the [15, 30) window.
expected_numbers = {
window.IntervalWindow(15, 30): [
('k', ['1', '2', '3']),
('k', []),
],
}
assert_that(
letters,
equal_to_per_window(expected_letters),
label='letters assert per window')
assert_that(
numbers,
equal_to_per_window(expected_numbers),
label='numbers assert per window')
p.run()
def test_dicts_not_interpreted_as_windowed_values(self):
test_stream = (TestStream()
.advance_processing_time(10)
.advance_watermark_to(10)
.add_elements([{'a': 0, 'b': 1, 'c': 2}])
.advance_watermark_to_infinity()) # yapf: disable
class RecordFn(beam.DoFn):
def process(
self,
element=beam.DoFn.ElementParam,
timestamp=beam.DoFn.TimestampParam):
yield (element, timestamp)
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
with TestPipeline(options=options) as p:
my_record_fn = RecordFn()
records = p | test_stream | beam.ParDo(my_record_fn)
assert_that(
records,
equal_to([
({
'a': 0, 'b': 1, 'c': 2
}, timestamp.Timestamp(10)),
]))
def test_windowed_values_interpreted_correctly(self):
windowed_value = WindowedValueHolder(
WindowedValue(
'a',
Timestamp(5), [beam.window.IntervalWindow(5, 10)],
PaneInfo(True, True, PaneInfoTiming.ON_TIME, 0, 0)))
test_stream = (TestStream()
.advance_processing_time(10)
.advance_watermark_to(10)
.add_elements([windowed_value])
.advance_watermark_to_infinity()) # yapf: disable
class RecordFn(beam.DoFn):
def process(
self,
element=beam.DoFn.ElementParam,
timestamp=beam.DoFn.TimestampParam,
window=beam.DoFn.WindowParam):
yield (element, timestamp, window)
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
with TestPipeline(options=options) as p:
my_record_fn = RecordFn()
records = p | test_stream | beam.ParDo(my_record_fn)
assert_that(
records,
equal_to([
('a', timestamp.Timestamp(5), beam.window.IntervalWindow(5, 10)),
]))
def test_gbk_execution_no_triggers(self):
test_stream = (
TestStream().advance_watermark_to(10).add_elements([
'a', 'b', 'c'
]).advance_watermark_to(20).add_elements(['d']).add_elements([
'e'
]).advance_processing_time(10).advance_watermark_to(300).add_elements([
TimestampedValue('late', 12)
]).add_elements([TimestampedValue('last', 310)
]).advance_watermark_to_infinity())
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
p = TestPipeline(options=options)
records = (
p
| test_stream
| beam.WindowInto(FixedWindows(15), allowed_lateness=300)
| beam.Map(lambda x: ('k', x))
| beam.GroupByKey())
# TODO(BEAM-2519): timestamp assignment for elements from a GBK should
# respect the TimestampCombiner. The test below should also verify the
# timestamps of the outputted elements once this is implemented.
# assert per window
expected_window_to_elements = {
window.IntervalWindow(0, 15): [
('k', ['a', 'b', 'c']),
('k', ['late']),
],
window.IntervalWindow(15, 30): [
('k', ['d', 'e']),
],
window.IntervalWindow(300, 315): [
('k', ['last']),
],
}
assert_that(
records,
equal_to_per_window(expected_window_to_elements),
label='assert per window')
p.run()
def test_gbk_execution_after_watermark_trigger(self):
test_stream = (TestStream()
.advance_watermark_to(10)
.add_elements([TimestampedValue('a', 11)])
.advance_watermark_to(20)
.add_elements([TimestampedValue('b', 21)])
.advance_watermark_to_infinity()) # yapf: disable
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
p = TestPipeline(options=options)
records = (
p # pylint: disable=unused-variable
| test_stream
| beam.WindowInto(
FixedWindows(15),
trigger=trigger.AfterWatermark(early=trigger.AfterCount(1)),
accumulation_mode=trigger.AccumulationMode.DISCARDING)
| beam.Map(lambda x: ('k', x))
| beam.GroupByKey())
# TODO(BEAM-2519): timestamp assignment for elements from a GBK should
# respect the TimestampCombiner. The test below should also verify the
# timestamps of the outputted elements once this is implemented.
# assert per window
expected_window_to_elements = {
window.IntervalWindow(0, 15): [('k', ['a']), ('k', [])],
window.IntervalWindow(15, 30): [('k', ['b']), ('k', [])],
}
assert_that(
records,
equal_to_per_window(expected_window_to_elements),
label='assert per window')
p.run()
def test_gbk_execution_after_processing_trigger_fired(self):
"""Advance TestClock to (X + delta) and see the pipeline does finish."""
# TODO(mariagh): Add test_gbk_execution_after_processing_trigger_unfired
# Advance TestClock to (X + delta) and see the pipeline does finish
# Possibly to the framework trigger_transcripts.yaml
test_stream = (TestStream()
.advance_watermark_to(10)
.add_elements(['a'])
.advance_processing_time(5.1)
.advance_watermark_to_infinity()) # yapf: disable
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
options.view_as(TypeOptions).allow_unsafe_triggers = True
p = TestPipeline(options=options)
records = (
p
| test_stream
| beam.WindowInto(
beam.window.FixedWindows(15),
trigger=trigger.AfterProcessingTime(5),
accumulation_mode=trigger.AccumulationMode.DISCARDING)
| beam.Map(lambda x: ('k', x))
| beam.GroupByKey())
# TODO(BEAM-2519): timestamp assignment for elements from a GBK should
# respect the TimestampCombiner. The test below should also verify the
# timestamps of the outputted elements once this is implemented.
expected_window_to_elements = {
window.IntervalWindow(0, 15): [('k', ['a'])],
}
assert_that(
records,
equal_to_per_window(expected_window_to_elements),
label='assert per window')
p.run()
def test_basic_execution_batch_sideinputs(self):
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
p = TestPipeline(options=options)
main_stream = (p
| 'main TestStream' >> TestStream()
.advance_watermark_to(10)
.add_elements(['e'])
.advance_watermark_to_infinity()) # yapf: disable
side = (
p
| beam.Create([2, 1, 4])
| beam.Map(lambda t: window.TimestampedValue(t, t)))
class RecordFn(beam.DoFn):
def process(
self,
elm=beam.DoFn.ElementParam,
ts=beam.DoFn.TimestampParam,
side=beam.DoFn.SideInputParam):
yield (elm, ts, sorted(side))
records = (
main_stream # pylint: disable=unused-variable
| beam.ParDo(RecordFn(), beam.pvalue.AsList(side)))
assert_that(records, equal_to([('e', Timestamp(10), [1, 2, 4])]))
p.run()
def test_basic_execution_sideinputs(self):
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
with TestPipeline(options=options) as p:
test_stream = (p | TestStream()
.advance_watermark_to(0, tag='side')
.advance_watermark_to(10, tag='main')
.add_elements(['e'], tag='main')
.add_elements([window.TimestampedValue(2, 2)], tag='side')
.add_elements([window.TimestampedValue(1, 1)], tag='side')
.add_elements([window.TimestampedValue(7, 7)], tag='side')
.add_elements([window.TimestampedValue(4, 4)], tag='side')
) # yapf: disable
main_stream = test_stream['main']
side_stream = test_stream['side']
class RecordFn(beam.DoFn):
def process(
self,
elm=beam.DoFn.ElementParam,
ts=beam.DoFn.TimestampParam,
side=beam.DoFn.SideInputParam):
yield (elm, ts, side)
records = (
main_stream # pylint: disable=unused-variable
| beam.ParDo(RecordFn(), beam.pvalue.AsList(side_stream)))
assert_that(records, equal_to([('e', Timestamp(10), [2, 1, 7, 4])]))
def test_basic_execution_batch_sideinputs_fixed_windows(self):
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
p = TestPipeline(options=options)
main_stream = (
p
|
'main TestStream' >> TestStream().advance_watermark_to(2).add_elements(
['a']).advance_watermark_to(4).add_elements(
['b']).advance_watermark_to_infinity()
| 'main window' >> beam.WindowInto(window.FixedWindows(1)))
side = (
p
| beam.Create([2, 1, 4])
| beam.Map(lambda t: window.TimestampedValue(t, t))
| beam.WindowInto(window.FixedWindows(2)))
class RecordFn(beam.DoFn):
def process(
self,
elm=beam.DoFn.ElementParam,
ts=beam.DoFn.TimestampParam,
side=beam.DoFn.SideInputParam):
yield (elm, ts, side)
records = (
main_stream # pylint: disable=unused-variable
| beam.ParDo(RecordFn(), beam.pvalue.AsList(side)))
# assert per window
expected_window_to_elements = {
window.IntervalWindow(2, 3): [('a', Timestamp(2), [2])],
window.IntervalWindow(4, 5): [('b', Timestamp(4), [4])]
}
assert_that(
records,
equal_to_per_window(expected_window_to_elements),
label='assert per window')
p.run()
def test_basic_execution_sideinputs_fixed_windows(self):
options = PipelineOptions()
options.view_as(StandardOptions).streaming = True
p = TestPipeline(options=options)
test_stream = (p | TestStream()
.advance_watermark_to(12, tag='side')
.add_elements([window.TimestampedValue('s1', 10)], tag='side')
.advance_watermark_to(20, tag='side')
.add_elements([window.TimestampedValue('s2', 20)], tag='side')
.advance_watermark_to(9, tag='main')
.add_elements(['a1', 'a2', 'a3', 'a4'], tag='main')
.add_elements(['b'], tag='main')
.advance_watermark_to(18, tag='main')
.add_elements('c', tag='main')
) # yapf: disable
main_stream = (
test_stream['main']
| 'main windowInto' >> beam.WindowInto(window.FixedWindows(1)))
side_stream = (
test_stream['side']
| 'side windowInto' >> beam.WindowInto(window.FixedWindows(3)))
class RecordFn(beam.DoFn):
def process(
self,
elm=beam.DoFn.ElementParam,
ts=beam.DoFn.TimestampParam,
side=beam.DoFn.SideInputParam):
yield (elm, ts, side)
records = (
main_stream # pylint: disable=unused-variable
| beam.ParDo(RecordFn(), beam.pvalue.AsList(side_stream)))
# assert per window
expected_window_to_elements = {
window.IntervalWindow(9, 10): [
('a1', Timestamp(9), ['s1']), ('a2', Timestamp(9), ['s1']),
('a3', Timestamp(9), ['s1']), ('a4', Timestamp(9), ['s1']),
('b', Timestamp(9), ['s1'])
],
window.IntervalWindow(18, 19): [('c', Timestamp(18), ['s2'])],
}
assert_that(
records,
equal_to_per_window(expected_window_to_elements),
label='assert per window')
p.run()
def test_roundtrip_proto(self):
test_stream = (TestStream()
.advance_processing_time(1)
.advance_watermark_to(2)
.add_elements([1, 2, 3])) # yapf: disable
p = TestPipeline(options=StandardOptions(streaming=True))
p | test_stream
pipeline_proto, context = p.to_runner_api(return_context=True)
for t in pipeline_proto.components.transforms.values():
if t.spec.urn == common_urns.primitives.TEST_STREAM.urn:
test_stream_proto = t
self.assertTrue(test_stream_proto)
roundtrip_test_stream = TestStream().from_runner_api(
test_stream_proto, context)
self.assertListEqual(test_stream._events, roundtrip_test_stream._events)
self.assertSetEqual(
test_stream.output_tags, roundtrip_test_stream.output_tags)
self.assertEqual(test_stream.coder, roundtrip_test_stream.coder)
def test_roundtrip_proto_multi(self):
test_stream = (TestStream()
.advance_processing_time(1)
.advance_watermark_to(2, tag='a')
.advance_watermark_to(3, tag='b')
.add_elements([1, 2, 3], tag='a')
.add_elements([4, 5, 6], tag='b')) # yapf: disable
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
p | test_stream
pipeline_proto, context = p.to_runner_api(return_context=True)
for t in pipeline_proto.components.transforms.values():
if t.spec.urn == common_urns.primitives.TEST_STREAM.urn:
test_stream_proto = t
self.assertTrue(test_stream_proto)
roundtrip_test_stream = TestStream().from_runner_api(
test_stream_proto, context)
self.assertListEqual(test_stream._events, roundtrip_test_stream._events)
self.assertSetEqual(
test_stream.output_tags, roundtrip_test_stream.output_tags)
self.assertEqual(test_stream.coder, roundtrip_test_stream.coder)
def test_basic_execution_with_service(self):
"""Tests that the TestStream can correctly read from an RPC service.
"""
coder = beam.coders.FastPrimitivesCoder()
test_stream_events = (TestStream(coder=coder)
.advance_watermark_to(10000)
.add_elements(['a', 'b', 'c'])
.advance_watermark_to(20000)
.add_elements(['d'])
.add_elements(['e'])
.advance_processing_time(10)
.advance_watermark_to(300000)
.add_elements([TimestampedValue('late', 12000)])
.add_elements([TimestampedValue('last', 310000)])
.advance_watermark_to_infinity())._events # yapf: disable
test_stream_proto_events = [
e.to_runner_api(coder) for e in test_stream_events
]
class InMemoryEventReader:
def read_multiple(self, unused_keys):
for e in test_stream_proto_events:
yield e
service = TestStreamServiceController(reader=InMemoryEventReader())
service.start()
test_stream = TestStream(coder=coder, endpoint=service.endpoint)
class RecordFn(beam.DoFn):
def process(
self,
element=beam.DoFn.ElementParam,
timestamp=beam.DoFn.TimestampParam):
yield (element, timestamp)
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
my_record_fn = RecordFn()
records = p | test_stream | beam.ParDo(my_record_fn)
assert_that(
records,
equal_to([
('a', timestamp.Timestamp(10)),
('b', timestamp.Timestamp(10)),
('c', timestamp.Timestamp(10)),
('d', timestamp.Timestamp(20)),
('e', timestamp.Timestamp(20)),
('late', timestamp.Timestamp(12)),
('last', timestamp.Timestamp(310)),
]))
p.run()
class ReverseTestStreamTest(unittest.TestCase):
def test_basic_execution(self):
test_stream = (TestStream()
.advance_watermark_to(0)
.advance_processing_time(5)
.add_elements(['a', 'b', 'c'])
.advance_watermark_to(2)
.advance_processing_time(1)
.advance_watermark_to(4)
.advance_processing_time(1)
.advance_watermark_to(6)
.advance_processing_time(1)
.advance_watermark_to(8)
.advance_processing_time(1)
.advance_watermark_to(10)
.advance_processing_time(1)
.add_elements([TimestampedValue('1', 15),
TimestampedValue('2', 15),
TimestampedValue('3', 15)])) # yapf: disable
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
records = (
p
| test_stream
| ReverseTestStream(sample_resolution_sec=1, output_tag=None))
assert_that(
records,
equal_to_per_window({
beam.window.GlobalWindow(): [
[ProcessingTimeEvent(5), WatermarkEvent(0)],
[
ElementEvent([
TimestampedValue('a', 0),
TimestampedValue('b', 0),
TimestampedValue('c', 0)
])
],
[ProcessingTimeEvent(1), WatermarkEvent(2000000)],
[ProcessingTimeEvent(1), WatermarkEvent(4000000)],
[ProcessingTimeEvent(1), WatermarkEvent(6000000)],
[ProcessingTimeEvent(1), WatermarkEvent(8000000)],
[ProcessingTimeEvent(1), WatermarkEvent(10000000)],
[
ElementEvent([
TimestampedValue('1', 15),
TimestampedValue('2', 15),
TimestampedValue('3', 15)
])
],
],
}))
p.run()
def test_windowing(self):
test_stream = (TestStream()
.advance_watermark_to(0)
.add_elements(['a', 'b', 'c'])
.advance_processing_time(1)
.advance_processing_time(1)
.advance_processing_time(1)
.advance_processing_time(1)
.advance_processing_time(1)
.advance_watermark_to(5)
.add_elements(['1', '2', '3'])
.advance_processing_time(1)
.advance_watermark_to(6)
.advance_processing_time(1)
.advance_watermark_to(7)
.advance_processing_time(1)
.advance_watermark_to(8)
.advance_processing_time(1)
.advance_watermark_to(9)
.advance_processing_time(1)
.advance_watermark_to(10)
.advance_processing_time(1)
.advance_watermark_to(11)
.advance_processing_time(1)
.advance_watermark_to(12)
.advance_processing_time(1)
.advance_watermark_to(13)
.advance_processing_time(1)
.advance_watermark_to(14)
.advance_processing_time(1)
.advance_watermark_to(15)
.advance_processing_time(1)
) # yapf: disable
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
records = (
p
| test_stream
| 'letter windows' >> beam.WindowInto(
FixedWindows(5),
accumulation_mode=trigger.AccumulationMode.DISCARDING)
| 'letter with key' >> beam.Map(lambda x: ('k', x))
| 'letter gbk' >> beam.GroupByKey()
| ReverseTestStream(sample_resolution_sec=1, output_tag=None))
assert_that(
records,
equal_to_per_window({
beam.window.GlobalWindow(): [
[ProcessingTimeEvent(5), WatermarkEvent(4999998)],
[
ElementEvent(
[TimestampedValue(('k', ['a', 'b', 'c']), 4.999999)])
],
[ProcessingTimeEvent(1), WatermarkEvent(5000000)],
[ProcessingTimeEvent(1), WatermarkEvent(6000000)],
[ProcessingTimeEvent(1), WatermarkEvent(7000000)],
[ProcessingTimeEvent(1), WatermarkEvent(8000000)],
[ProcessingTimeEvent(1), WatermarkEvent(9000000)],
[
ElementEvent(
[TimestampedValue(('k', ['1', '2', '3']), 9.999999)])
],
[ProcessingTimeEvent(1), WatermarkEvent(10000000)],
[ProcessingTimeEvent(1), WatermarkEvent(11000000)],
[ProcessingTimeEvent(1), WatermarkEvent(12000000)],
[ProcessingTimeEvent(1), WatermarkEvent(13000000)],
[ProcessingTimeEvent(1), WatermarkEvent(14000000)],
[ProcessingTimeEvent(1), WatermarkEvent(15000000)],
],
}))
p.run()
def test_basic_execution_in_records_format(self):
test_stream = (TestStream()
.advance_watermark_to(0)
.advance_processing_time(5)
.add_elements(['a', 'b', 'c'])
.advance_watermark_to(2)
.advance_processing_time(1)
.advance_watermark_to(4)
.advance_processing_time(1)
.advance_watermark_to(6)
.advance_processing_time(1)
.advance_watermark_to(8)
.advance_processing_time(1)
.advance_watermark_to(10)
.advance_processing_time(1)
.add_elements([TimestampedValue('1', 15),
TimestampedValue('2', 15),
TimestampedValue('3', 15)])) # yapf: disable
options = StandardOptions(streaming=True)
p = TestPipeline(options=options)
coder = beam.coders.FastPrimitivesCoder()
records = (
p
| test_stream
| ReverseTestStream(
sample_resolution_sec=1,
coder=coder,
output_format=OutputFormat.TEST_STREAM_FILE_RECORDS,
output_tag=None)
| 'stringify' >> beam.Map(str))
assert_that(
records,
equal_to_per_window({
beam.window.GlobalWindow(): [
str(TestStreamFileHeader()),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=5000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=0)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
element_event=TestStreamPayload.Event.AddElements(
elements=[
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('a'),
timestamp=0),
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('b'),
timestamp=0),
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('c'),
timestamp=0),
])))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=2000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=1000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=4000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=1000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=6000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=1000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=8000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=1000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
watermark_event=TestStreamPayload.Event.
AdvanceWatermark(new_watermark=10000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
processing_time_event=TestStreamPayload.Event.
AdvanceProcessingTime(advance_duration=1000000)))),
str(
TestStreamFileRecord(
recorded_event=TestStreamPayload.Event(
element_event=TestStreamPayload.Event.AddElements(
elements=[
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('1'),
timestamp=15000000),
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('2'),
timestamp=15000000),
TestStreamPayload.TimestampedElement(
encoded_element=coder.encode('3'),
timestamp=15000000),
])))),
],
}))
p.run()
if __name__ == '__main__':
unittest.main()
| lukecwik/incubator-beam | sdks/python/apache_beam/testing/test_stream_test.py | Python | apache-2.0 | 38,080 | 0.002784 |
# -*- coding: utf8 -*-
import subprocess
import os
from pathlib import Path
cwd = os.getcwd()
res = None  # make sure res exists even if the subprocess call below fails
try:
print(os.getcwd())
subprocess.call(['make'])
# res = subprocess.check_output('uname -a',shell=True)
res = subprocess.check_output(
r"./darknet detector test cfg/coco.data cfg/yolo.cfg yolo.weights /home/zaki/NoooDemo/0001.jpg", shell=True)
except Exception as ex:
print(ex)
finally:
os.chdir(cwd)
print(res)
def main() -> None:
pass
if __name__ == '__main__':
main()
| umyuu/Sample | src/Python3/Q113190/exsample.py | Python | mit | 512 | 0.007813 |
#!/usr/bin/env python
import sys, json, csv, pprint
patrons = []
with open(sys.argv[1]) as csvfile:
csvrows = csv.DictReader(csvfile)
for row in csvrows:
name = row["Name"]
pledge = float(row["Pledge $"].replace("$",""))
lifetime = float(row["Lifetime $"].replace("$",""))
status = row["Patron Status"]
details = row["Additional Details"]
since = row["Patronage Since Date"]
if details != "":
name = details
if status == "Active patron":
if lifetime > 0 and pledge >= 5:
patrons += [(name, lifetime, since)]
patreons = sorted(patrons, key=lambda x: x[2])
for (name, lifetime, since) in patreons:
print "* {}".format(name)
| OverTheWireOrg/OverTheWire-website | patreon/patreon.py | Python | mit | 745 | 0.004027 |
import numpy as np
import env
from base import BaseEstimator
from utils import RNG, print_inline, width_format, Stopwatch
from layers import FullyConnected
from activations import sigmoid
class RBM(BaseEstimator):
"""
Examples
--------
>>> X = RNG(seed=1337).rand(32, 256)
>>> rbm = RBM(n_hidden=100,
... k=4,
... batch_size=2,
... n_epochs=50,
... learning_rate='0.05->0.005',
... momentum='0.5->0.9',
... verbose=True,
... early_stopping=5,
... random_seed=1337)
>>> rbm
RBM(W=None, batch_size=2, best_W=None, best_epoch=None, best_hb=None,
best_recon=inf, best_vb=None, early_stopping=5, epoch=0, hb=None, k=4,
learning_rate='0.05->0.005', momentum='0.5->0.9', n_epochs=50,
n_hidden=100, persistent=True, random_seed=1337, vb=None, verbose=True)
"""
def __init__(self, n_hidden=256, persistent=True, k=1,
batch_size=10, n_epochs=10, learning_rate=0.1, momentum=0.9,
early_stopping=None, verbose=False, random_seed=None):
self.n_hidden = n_hidden
self.persistent = persistent
self.k = k # k in CD-k / PCD-k
self.batch_size = batch_size
self.n_epochs = n_epochs
self.learning_rate = learning_rate
self._learning_rate = None
self.momentum = momentum
self._momentum = None
self.early_stopping = early_stopping
self._early_stopping = self.early_stopping
self.verbose = verbose
self.random_seed = random_seed
self.W = None
self.vb = None # visible units bias
self.hb = None # hidden units bias
self.epoch = 0
self.best_W = None
self.best_vb = None
self.best_hb = None
self.best_epoch = None
self.best_recon = np.inf
self._dW = None
self._dvb = None
self._dhb = None
self._rng = None
self._persistent = None
self._initialized = False
super(RBM, self).__init__(_y_required=False)
def propup(self, v):
"""Propagate visible units activation upwards to the hidden units."""
z = np.dot(v, self.W) + self.hb
return sigmoid(z)
def sample_h_given_v(self, v0_sample):
"""Infer state of hidden units given visible units."""
h1_mean = self.propup(v0_sample)
h1_sample = self._rng.binomial(size=h1_mean.shape, n=1, p=h1_mean)
return h1_mean, h1_sample
def propdown(self, h):
"""Propagate hidden units activation downwards to the visible units."""
z = np.dot(h, self.W.T) + self.vb
return sigmoid(z)
def sample_v_given_h(self, h0_sample):
"""Infer state of visible units given hidden units."""
v1_mean = self.propdown(h0_sample)
v1_sample = self._rng.binomial(size=v1_mean.shape, n=1, p=v1_mean)
return v1_mean, v1_sample
def gibbs_hvh(self, h0_sample):
"""Performs a step of Gibbs sampling starting from the hidden units."""
v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
h1_mean, h1_sample = self.sample_h_given_v(v1_sample)
return v1_mean, v1_sample, h1_mean, h1_sample
def gibbs_vhv(self, v0_sample):
"""Performs a step of Gibbs sampling starting from the visible units."""
raise NotImplementedError()
def free_energy(self, v_sample):
"""Function to compute the free energy."""
raise NotImplementedError()
def update(self, X_batch):
# compute positive phase
ph_mean, ph_sample = self.sample_h_given_v(X_batch)
# decide how to initialize chain
if self._persistent is not None:
chain_start = self._persistent
else:
chain_start = ph_sample
# gibbs sampling
for step in xrange(self.k):
nv_means, nv_samples, \
nh_means, nh_samples = self.gibbs_hvh(chain_start if step == 0 else nh_samples)
# update weights
self._dW = self._momentum * self._dW + \
np.dot(X_batch.T, ph_mean) - np.dot(nv_samples.T, nh_means)
self._dvb = self._momentum * self._dvb +\
np.mean(X_batch - nv_samples, axis=0)
self._dhb = self._momentum * self._dhb +\
np.mean(ph_mean - nh_means, axis=0)
self.W += self._learning_rate * self._dW
self.vb += self._learning_rate * self._dvb
self.hb += self._learning_rate * self._dhb
# remember state if needed
if self.persistent:
self._persistent = nh_samples
return np.mean(np.square(X_batch - nv_means))
def batch_iter(self, X):
n_batches = len(X) / self.batch_size
for i in xrange(n_batches):
start = i * self.batch_size
end = start + self.batch_size
X_batch = X[start:end]
yield X_batch
if n_batches * self.batch_size < len(X):
yield X[end:]
def train_epoch(self, X):
mean_recons = []
for i, X_batch in enumerate(self.batch_iter(X)):
mean_recons.append(self.update(X_batch))
if self.verbose and i % (len(X)/(self.batch_size * 16)) == 0:
print_inline('.')
if self.verbose: print_inline(' ')
return np.mean(mean_recons)
def _fit(self, X):
if not self._initialized:
layer = FullyConnected(self.n_hidden,
bias=0.,
random_seed=self.random_seed)
layer.setup_weights(X.shape)
self.W = layer.W
self.vb = np.zeros(X.shape[1])
self.hb = layer.b
self._dW = np.zeros_like(self.W)
self._dvb = np.zeros_like(self.vb)
self._dhb = np.zeros_like(self.hb)
self._rng = RNG(self.random_seed)
self._rng.reseed()
timer = Stopwatch(verbose=False).start()
for _ in xrange(self.n_epochs):
self.epoch += 1
if self.verbose:
print_inline('Epoch {0:>{1}}/{2} '.format(self.epoch, len(str(self.n_epochs)), self.n_epochs))
if isinstance(self.learning_rate, str):
S, F = map(float, self.learning_rate.split('->'))
self._learning_rate = S + (F - S) * (1. - np.exp(-(self.epoch - 1.)/8.)) / (
1. - np.exp(-(self.n_epochs - 1.)/8.))
else:
self._learning_rate = self.learning_rate
if isinstance(self.momentum, str):
S, F = map(float, self.momentum.split('->'))
self._momentum = S + (F - S) * (1. - np.exp(-(self.epoch - 1)/4.)) / (
1. - np.exp(-(self.n_epochs - 1)/4.))
else:
self._momentum = self.momentum
mean_recon = self.train_epoch(X)
if mean_recon < self.best_recon:
self.best_recon = mean_recon
self.best_epoch = self.epoch
self.best_W = self.W.copy()
self.best_vb = self.vb.copy()
self.best_hb = self.hb.copy()
self._early_stopping = self.early_stopping
msg = 'elapsed: {0} sec'.format(width_format(timer.elapsed(), default_width=5, max_precision=2))
msg += ' - recon. mse: {0}'.format(width_format(mean_recon, default_width=6, max_precision=4))
msg += ' - best r-mse: {0}'.format(width_format(self.best_recon, default_width=6, max_precision=4))
if self.early_stopping:
msg += ' {0}*'.format(self._early_stopping)
if self.verbose:
print msg
if self._early_stopping == 0:
return
if self.early_stopping:
self._early_stopping -= 1
def _serialize(self, params):
for attr in ('W', 'best_W',
'vb', 'best_vb',
'hb', 'best_hb'):
if attr in params and params[attr] is not None:
params[attr] = params[attr].tolist()
return params
def _deserialize(self, params):
for attr in ('W', 'best_W',
'vb', 'best_vb',
'hb', 'best_hb'):
if attr in params and params[attr] is not None:
params[attr] = np.asarray(params[attr])
return params
if __name__ == '__main__':
# run corresponding tests
from utils.testing import run_tests
run_tests(__file__)
| monsta-hd/ml-mnist | ml_mnist/nn/rbm.py | Python | mit | 8,597 | 0.002326 |
from myhdl import *
class VGA:
def __init__(self, color_depth=(10, 10, 10,)):
"""
        color_depth: the number of bits per RGB channel
"""
self.N = color_depth
# the sync signals
self.hsync = Signal(bool(1))
self.vsync = Signal(bool(1))
# the RGB signals to the video
cd = color_depth
self.red = Signal(intbv(0)[cd[0]:])
self.green = Signal(intbv(0)[cd[1]:])
self.blue = Signal(intbv(0)[cd[2]:])
# logic VGA timing signals, used internally only
self.pxlen = Signal(bool(0))
self.active = Signal(bool(0))
        # @todo: move this to `vga_sync`; this is specific to the
        # VGA driver and not the interface
# these are used for verification and debug only.
self.states = enum('NONE', 'ACTIVE',
'HOR_FRONT_PORCH', 'HSYNC', 'HOR_BACK_PORCH',
'VER_FRONT_PORCH', 'VSYNC', 'VER_BACK_PORCH')
self.state = Signal(self.states.ACTIVE)
def assign(self, hsync, vsync, red, green, blue, pxlen=None, active=None):
""" in some cases discrete signals are connected """
self.hsync = hsync
self.vsync = vsync
self.red = red
self.green = green
self.blue = blue
if pxlen is not None:
self.pxlen = pxlen
        if active is not None:
            self.active = active
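# --- Usage sketch (illustrative only; not part of the original interface) ---
# Shows how the interface container might be created and how externally created
# signals could be attached via assign(). The 8-bit colour width is an
# assumption made for the example.
if __name__ == '__main__':
    vga = VGA(color_depth=(8, 8, 8))
    hsync, vsync = Signal(bool(1)), Signal(bool(1))
    red, green, blue = [Signal(intbv(0)[8:]) for _ in range(3)]
    vga.assign(hsync, vsync, red, green, blue)
    print(vga.red, vga.hsync)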
| cfelton/rhea | rhea/cores/video/vga/vga_intf.py | Python | mit | 1,435 | 0 |
f = open('input.txt')
triangles = [map(int,l.split()) for l in f.readlines()]
possible = 0
for t in triangles:
    t.sort()
    # after sorting, it is enough to check that the two shorter sides
    # sum to more than the longest side (triangle inequality)
    if t[0] + t[1] > t[2]:
        possible += 1
print(possible)
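# input.txt is expected to contain one candidate triangle per line as three
# whitespace-separated integer side lengths, e.g. (made-up values):
#   5 10 25
#   3 4 5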
| pwicks86/adventofcode2016 | day03/p1.py | Python | mit | 190 | 0.010526 |
from __future__ import print_function
from troposphere import (
Template, Parameter, Ref, Condition, Equals, And, Or, Not, If
)
from troposphere import ec2
parameters = {
"One": Parameter(
"One",
Type="String",
),
"Two": Parameter(
"Two",
Type="String",
),
"Three": Parameter(
"Three",
Type="String",
),
"Four": Parameter(
"Four",
Type="String",
),
"SshKeyName": Parameter(
"SshKeyName",
Type="String",
)
}
conditions = {
"OneEqualsFoo": Equals(
Ref("One"),
"Foo"
),
"NotOneEqualsFoo": Not(
Condition("OneEqualsFoo")
),
"BarEqualsTwo": Equals(
"Bar",
Ref("Two")
),
"ThreeEqualsFour": Equals(
Ref("Three"),
Ref("Four")
),
"OneEqualsFooOrBarEqualsTwo": Or(
Condition("OneEqualsFoo"),
Condition("BarEqualsTwo")
),
"OneEqualsFooAndNotBarEqualsTwo": And(
Condition("OneEqualsFoo"),
Not(Condition("BarEqualsTwo"))
),
"OneEqualsFooAndBarEqualsTwoAndThreeEqualsPft": And(
Condition("OneEqualsFoo"),
Condition("BarEqualsTwo"),
Equals(Ref("Three"), "Pft")
),
"OneIsQuzAndThreeEqualsFour": And(
Equals(Ref("One"), "Quz"),
Condition("ThreeEqualsFour")
),
"LaunchInstance": And(
Condition("OneEqualsFoo"),
Condition("NotOneEqualsFoo"),
Condition("BarEqualsTwo"),
Condition("OneEqualsFooAndNotBarEqualsTwo"),
Condition("OneIsQuzAndThreeEqualsFour")
),
"LaunchWithGusto": And(
Condition("LaunchInstance"),
Equals(Ref("One"), "Gusto")
)
}
resources = {
"Ec2Instance": ec2.Instance(
"Ec2Instance",
Condition="LaunchInstance",
        ImageId=If("OneEqualsFoo", "ami-12345678", "ami-87654321"),
InstanceType="t1.micro",
KeyName=Ref("SshKeyName"),
SecurityGroups=["default"],
)
}
def template():
t = Template()
for p in parameters.values():
t.add_parameter(p)
for k in conditions:
t.add_condition(k, conditions[k])
for r in resources.values():
t.add_resource(r)
return t
print(template().to_json())
| wangqiang8511/troposphere | examples/EC2Conditions.py | Python | bsd-2-clause | 2,279 | 0 |
"""Root URL definitions."""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
urlpatterns = [
url(r'^accounts/', include('accounts.urls', namespace='accounts')),
url(r'^admin/', admin.site.urls),
url(r'^', include('core.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| knyghty/bord | bord/urls.py | Python | mit | 413 | 0 |
from Spider import *
from config import *
class run():
def __init__(self):
pass
    # Fetch all category lists
def get_list(self):
list = cfg.getType()
for l in list.keys():
print(l)
return list
    # Get the category chosen by the user
def input_type(self):
start = spider(url)
list = self.get_list()
        #type = input('Enter the category type to download (just copy one of the names printed above):\n')
type = '亚洲无码'
if type in cfg.getType() and type == '亚洲无码':
            print('Found')
newDir = start.newDir(type)
listLink = list[type]
            # Fetch the listing page links
oneList = start.openLink(listLink,type)
            # Start parsing the content pages
#oneContent = start.getContent(oneList,type)
elif type in cfg.getType() and type == '成人小说':
pass
        else:
            print('Category not found, or downloading this type is not supported yet')
if __name__ == '__main__':
cfg = config()
url = cfg.url
a = run()
a.input_type()
| hitaian/dogSpider | runSpider.py | Python | bsd-2-clause | 1,090 | 0.013889 |
#!/usr/bin/env python
from setuptools import setup, find_packages
__doc__ = """
Falsify data
"""
version = '0.0.1'
setup(name='perjury',
version=version,
description=__doc__,
author='Aaron Merriam',
author_email='[email protected]',
keywords='content',
long_description=__doc__,
url='https://github.com/aaronmerriam/foundry',
packages=find_packages(),
platforms="any",
license='BSD',
test_suite='tests',
classifiers=[
'Development Status :: 3 - Alpha',
'Natural Language :: English',
],
)
| pipermerriam/perjury | setup.py | Python | bsd-2-clause | 565 | 0.021239 |
# coding: utf8
"""
1. Train a separate Doc2Vec model for each document.
2. Train a single Doc2Vec model on all documents together.
"""
import json
from gensim.models import Doc2Vec
from gensim.models.doc2vec import TaggedDocument
from stst.modules.features import Feature
from stst import utils
from stst import config
from stst.data import dict_utils
from stst.libs.kernel import vector_kernel as vk
class Doc2VecFeature(Feature):
def extract_instances(self, train_instances):
sentences = []
for idx, train_instance in enumerate(train_instances):
sa, sb = train_instance.get_word(type='lemma', lower=True)
sentences.append(TaggedDocument(words=sa, tags=['sa_%d' % idx]))
sentences.append(TaggedDocument(words=sb, tags=['sb_%d' % idx]))
model = Doc2Vec(sentences, size=25, window=3, min_count=0, workers=10, iter=1000)
features = []
infos = []
for idx in range(len(train_instances)):
vec_a = model.docvecs['sa_%d' % idx]
vec_b = model.docvecs['sb_%d' % idx]
feature, info = vk.get_all_kernel(vec_a, vec_b)
features.append(feature)
infos.append([])
# infos.append([vec_a, vec_b])
return features, infos
# def load_instances(self, train_instances):
# """
# extract cosine distance from already trained feature file
# without modify the feature_file
# this function's priority is higher that the above extract_instances
# """
#
# _features, _n_dim, _n_instance = Feature.load_feature_from_file(self.feature_file)
# features = []
# infos = []
# ''' get features from train instances'''
# for _feature in _features:
# feature = Feature._feat_string_to_list(_feature, _n_dim)
# features.append([feature[1]])
# infos.append(['cosine'])
#
# features = [ Feature._feat_list_to_string(feature) for feature in features ]
#
# return features, 1, _n_instance
class Doc2VecGlobalFeature(Feature):
def __init__(self, **kwargs):
super(Doc2VecGlobalFeature, self).__init__(**kwargs)
def extract_instances(self, train_instances):
model = dict_utils.DictLoader().load_doc2vec()
file_name = self.train_file.split('/')[-1]
features = []
infos = []
for idx in range(len(train_instances)):
vec_a = model.docvecs['%s_%d_sa' % (file_name, idx)]
vec_b = model.docvecs['%s_%d_sb' % (file_name, idx)]
# train_instance = train_instances[idx]
# sa, sb = train_instance.get_word(type='lemma', stopwords=True, lower=True)
# vec_a = model.infer_vector(sa)
# vec_b = model.infer_vector(sb)
feature, info = vk.get_all_kernel(vec_a, vec_b)
features.append(feature)
infos.append(info)
return features, infos
class ICLRScoreFeature(Feature):
def __init__(self, nntype, **kwargs):
super(ICLRScoreFeature, self).__init__(**kwargs)
self.nntype = nntype
self.feature_name = self.feature_name + '-%s' % (nntype)
def extract_instances(self, train_instances):
features = []
infos = []
input_file = self.feature_file.split('/')[-2] + '.txt'
f_in = utils.create_read_file(config.NN_FEATURE_PATH + '/' + self.nntype + '/' + input_file)
for line in f_in:
line = line.strip()
obj = json.loads(line)
sc = obj[0] / 5.0
features.append([sc])
infos.append([])
print(len(features), features[0])
return features, infos
class ICLRVectorFeature(Feature):
def __init__(self, nntype, **kwargs):
super(ICLRVectorFeature, self).__init__(**kwargs)
self.nntype = nntype
self.feature_name = self.feature_name + '-%s' % (nntype)
def extract_instances(self, train_instances):
features = []
infos = []
input_file = self.feature_file.split('/')[-2] + '.txt'
f_in = utils.create_read_file(config.NN_FEATURE_PATH + '/' + self.nntype + '/' + input_file)
for line in f_in:
line = line.strip()
obj = json.loads(line)
emb1 = obj[1]
emb2 = obj[2]
emb1 = vk.normalize(emb1)
emb2 = vk.normalize(emb2)
feats, info = vk.get_all_kernel(emb1, emb2)
features.append(feats)
infos.append(info)
print(len(features), features[0], infos[0])
return features, infos
| rgtjf/Semantic-Texual-Similarity-Toolkits | stst/features/features_nn.py | Python | mit | 4,605 | 0.001533 |
"""Leetcode 796. Rotate String
Easy
URL: https://leetcode.com/problems/rotate-string/
We are given two strings, A and B.
A shift on A consists of taking string A and moving the leftmost character to
the rightmost position. For example, if A = 'abcde', then it will be 'bcdea'
after one shift on A. Return True if and only if A can become B after some
number of shifts on A.
Example 1:
Input: A = 'abcde', B = 'cdeab'
Output: true
Example 2:
Input: A = 'abcde', B = 'abced'
Output: false
Note:
A and B will have length at most 100.
"""
class SolutionStringConcatSubstring(object):
def rotateString(self, A, B):
"""
:type A: str
:type B: str
:rtype: bool
        Time complexity: O(2n + 2n*n) = O(n^2).
        Space complexity: O(n).
"""
# Check if lengths are not equal.
if len(A) != len(B):
return False
        # If B is a rotation of A, then B is a substring of the concatenated string A + A.
AA = A + A
if B in AA:
return True
else:
return False
def main():
# Input: A = 'abcde', B = 'cdeab'
# Output: true
A = 'abcde'
B = 'cdeab'
print SolutionStringConcatSubstring().rotateString(A, B)
# Input: A = 'abcde', B = 'abced'
# Output: false
A = 'abcde'
B = 'abced'
print SolutionStringConcatSubstring().rotateString(A, B)
if __name__ == '__main__':
main()
| bowen0701/algorithms_data_structures | lc0796_rotate_string.py | Python | bsd-2-clause | 1,408 | 0.00071 |
# ==============================================================================
# Copyright [2013] [Kevin Carter]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import logging
import os
from bookofnova.info import __appname__ as appname
class NoLogLevelSet(Exception):
pass
class Logging(object):
def __init__(self, log_level, log_file=None):
self.log_level = log_level
self.log_file = log_file
def logger_setup(self):
"""
        Set up logging for your application.
"""
logger = logging.getLogger("%s" % (appname.upper()))
avail_level = {'DEBUG': logging.DEBUG,
'INFO': logging.INFO,
'CRITICAL': logging.CRITICAL,
'WARN': logging.WARN,
'ERROR': logging.ERROR}
_log_level = self.log_level.upper()
if _log_level in avail_level:
lvl = avail_level[_log_level]
logger.setLevel(lvl)
formatter = logging.Formatter("%(asctime)s - %(name)s:%(levelname)s ==>"
" %(message)s")
else:
raise NoLogLevelSet('I died because you did not set a known log level')
        # Build the handler
if self.log_file:
handler = logging.FileHandler(self.log_file)
else:
handler = logging.StreamHandler()
handler.setLevel(lvl)
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
def return_logfile(filename):
"""
    Return a path for the logging file.
    If "/var/log/" does not exist, or you don't have write permissions to
    "/var/log/", the log file will be placed in your working directory.
    Check for the ROOT user; if not root, log to the working directory.
"""
if os.path.isfile(filename):
return filename
else:
user = os.getuid()
logname = ('%s' % filename)
if not user == 0:
logfile = logname
        else:
            # default system log location
            log_loc = '/var/log'
            if os.path.isdir(log_loc):
                logfile = '%s/%s' % (log_loc, logname)
            else:
                try:
                    os.mkdir(log_loc)
                    logfile = '%s/%s' % (log_loc, logname)
                except Exception:
                    logfile = '%s' % logname
return logfile
def load_in(log_file=None, log_level='info', output=None):
"""
    Load in the log handler. If output is not None, the system will use the
    default log facility.
"""
if not output:
if log_file:
_log_file = return_logfile(filename=log_file)
log = Logging(log_level=log_level, log_file=_log_file)
output = log.logger_setup()
else:
output = Logging(log_level=log_level).logger_setup()
return output
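# --- Usage sketch (illustrative only; the file name and level are assumptions) ---
if __name__ == '__main__':
    log = load_in(log_file='bookofnova.log', log_level='debug')
    log.info('logger is ready')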
| cloudnull/bookofnova | bookofnova/logger.py | Python | apache-2.0 | 3,372 | 0.000593 |
#!/usr/bin/env python
from iris_sdk.models.maps.base_map import BaseMap
class CitiesMap(BaseMap):
result_count = None
cities = None | scottbarstow/iris-python | iris_sdk/models/maps/cities.py | Python | mit | 142 | 0.014085 |
# =============================================================================
# COPYRIGHT 2013 Brain Corporation.
# License under MIT license (see LICENSE file)
# =============================================================================
import doctest
import logging
import os
import pytest
import robustus
from robustus.detail import check_module_available
from robustus.detail.utility import run_shell, check_run_shell
import shutil
import subprocess
import tempfile
def test_doc_tests():
doctest.testmod(robustus, raise_on_error=True)
doctest.testmod(robustus.detail.utility, raise_on_error=True)
def test_run_shell():
def check(command, expected_ret_code, expected_output, verbose):
tf = tempfile.TemporaryFile('w+')
assert run_shell(command, shell=True, stdout=tf, verbose=verbose) == expected_ret_code
tf.seek(0)
assert tf.read() == expected_output
try:
exception_occured = False
check_run_shell(command, shell=True, verbose=verbose)
except subprocess.CalledProcessError:
exception_occured = True
        assert exception_occured == (expected_ret_code != 0)
check('echo robustus', 0, 'robustus\n', verbose=True)
check('echo robustus', 0, 'robustus\n', verbose=False)
check('echo robustus && exit 1', 1, 'robustus\n', verbose=True)
check('echo robustus && exit 1', 1, 'robustus\n', verbose=False)
def test_robustus(tmpdir):
tmpdir.chdir()
test_env = 'test_env'
# create env and install some packages
logging.info('creating ' + test_env)
robustus.execute(['env', test_env])
assert os.path.isdir(test_env)
assert os.path.isfile(os.path.join(test_env, '.robustus'))
robustus_executable = os.path.join(test_env, 'bin/robustus')
assert os.path.isfile(robustus_executable)
# install some packages
logging.info('installing requirements into ' + test_env)
run_shell([robustus_executable, 'install', 'pyserial'])
test_requirements1 = 'test_requirements1.txt'
with open(test_requirements1, 'w') as file:
file.write('pep8==1.3.3\n')
file.write('pytest==2.3.5\n')
run_shell([robustus_executable, 'install', '-r', test_requirements1])
# check packages are installed
packages_to_check = ['pyserial', 'pep8==1.3.3', 'pytest==2.3.5']
with open('freezed_requirements.txt', 'w') as req_file:
run_shell([robustus_executable, 'freeze'], stdout=req_file)
with open('freezed_requirements.txt') as req_file:
installed_packages = [line.strip() for line in req_file]
for package in packages_to_check:
assert package in installed_packages
assert check_module_available(test_env, 'serial')
assert check_module_available(test_env, 'pep8')
assert check_module_available(test_env, 'pytest')
shutil.rmtree(test_env)
def create_editable_environment(tmpdir):
"""Create an environment with an editable (shared between some tests) and
chdir into it."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
os.system('git init .')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=ardrone\n')
run_shell([robustus_executable, 'install', '-r', test_requirements])
return working_dir, test_env, robustus_executable
def test_pereditable(tmpdir):
"""Create a package with some editable requirements and check
that perrepo runs as expected."""
working_dir, test_env, robustus_executable = create_editable_environment(tmpdir)
# Now check that robustus behaves as expected
run_shell([robustus_executable, 'perrepo', 'touch', 'foo'])
assert os.path.exists(os.path.join(working_dir, 'foo'))
assert os.path.exists(os.path.join(test_env, 'src', 'ardrone', 'foo'))
def test_reset(tmpdir):
"""Try reset the environment"""
working_dir, test_env, robustus_executable = create_editable_environment(tmpdir)
# Change a file in the repo and check it is reset
changed_filepath = os.path.join(test_env, 'src', 'ardrone', 'README')
original_content = open(changed_filepath, 'r').read()
f = open(changed_filepath, 'w')
f.write('junk')
f.close()
run_shell([robustus_executable, 'reset', '-f'])
assert original_content == open(changed_filepath, 'r').read()
def test_install_with_tag(tmpdir):
"""Create a package with some editable requirements and install using a tag."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
os.system('git init .')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=robustus-test-repo\n')
run_shell([robustus_executable, 'install', '--tag', 'test-tag', '-r', test_requirements])
# Now check that robustus behaves as expected
assert os.path.exists(os.path.join(test_env, 'src', 'robustus-test-repo', 'test-tag'))
def test_install_with_branch_testing(tmpdir):
"""Create a package with some editable requirements and install using a branch
and check that one repo with the branch gets checked out using the branch
and the other ends up on master (this is how testing is often done)."""
base_dir = str(tmpdir.mkdir('test_perrepo_env'))
test_env = os.path.join(base_dir, 'env')
working_dir = os.path.join(base_dir, 'working_dir')
# create env and install some packages
logging.info('creating ' + test_env)
os.mkdir(working_dir)
os.chdir(working_dir)
    # create a new local repo
os.system('git init .')
setup_file_content =\
'''
from setuptools import setup, find_packages
setup(
name='test_perrepo_env',
author='Brain Corporation',
author_email='[email protected]',
url='https://github.com/braincorp/test_perrepo_env',
long_description='',
version='dev',
packages=find_packages(),
include_package_data=True,
install_requires=[])
'''
setup_file = os.path.join(working_dir, 'setup.py')
with open(setup_file, 'w') as file:
file.write(setup_file_content)
test_requirements = os.path.join(working_dir, 'requirements.txt')
with open(test_requirements, 'w') as file:
file.write('-e git+https://github.com/braincorp/robustus-test-repo.git@master#egg=robustus-test-repo\nmock==0.8.0\n-e git+https://github.com/braincorp/filecacher.git@master#egg=filecacher\n')
os.system('git add setup.py')
os.system('git add requirements.txt')
os.system('git commit -am "setup and reqs"')
# create test branch
os.system('git checkout -b test-branch')
test_file_on_test_branch = os.path.join(working_dir, 'root_test_branch.file')
with open(test_file_on_test_branch, 'w') as file:
file.write('root test')
os.system('git add root_test_branch.file')
os.system('git commit -am "root_test_branch.file"')
os.system('git checkout master')
robustus.execute(['env', test_env])
os.chdir(working_dir)
robustus_executable = os.path.join(test_env, 'bin/robustus')
    run_shell([robustus_executable, 'install', '-e', '.', '--tag', 'test-branch', '--ignore-missing-refs'], verbose=True)
# Now check that robustus behaves as expected
assert os.path.exists(os.path.join(test_env, 'src', 'robustus-test-repo', 'test_branch.file'))
assert os.path.exists(os.path.join(test_env, 'lib', 'python2.7', 'site-packages',
'python-ardrone.egg-link'))
assert os.path.exists(os.path.join(test_env, 'src', 'filecacher', 'requirements.txt'))
assert os.path.exists(os.path.join(test_env, 'lib', 'python2.7', 'site-packages',
'filecacher.egg-link'))
# Now check that the repo itself is on the test branch
assert os.path.exists(test_file_on_test_branch)
if __name__ == '__main__':
test_doc_tests()
pytest.main('-s %s -n0' % __file__)
| braincorp/robustus | robustus/tests/test_robustus.py | Python | mit | 8,887 | 0.006414 |
"""
MySQL database backend for Django.
Requires MySQLdb: http://sourceforge.net/projects/mysql-python
"""
from __future__ import unicode_literals
import datetime
import re
import sys
import warnings
try:
import MySQLdb as Database
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
# We want version (1, 2, 1, 'final', 2) or later. We can't just use
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
# inadvertently passes the version test.
version = Database.version_info
if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and
(len(version) < 5 or version[3] != 'final' or version[4] < 2))):
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
from MySQLdb.converters import conversions, Thing2Literal
from MySQLdb.constants import FIELD_TYPE, CLIENT
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.db import utils
from django.db.backends import (utils as backend_utils, BaseDatabaseFeatures,
BaseDatabaseOperations, BaseDatabaseWrapper)
from django.db.backends.mysql.client import DatabaseClient
from django.db.backends.mysql.creation import DatabaseCreation
from django.db.backends.mysql.introspection import DatabaseIntrospection
from django.db.backends.mysql.validation import DatabaseValidation
from django.utils.encoding import force_str, force_text
from django.db.backends.mysql.schema import DatabaseSchemaEditor
from django.utils.functional import cached_property
from django.utils.safestring import SafeBytes, SafeText
from django.utils import six
from django.utils import timezone
# Raise exceptions for database warnings if DEBUG is on
if settings.DEBUG:
warnings.filterwarnings("error", category=Database.Warning)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
# It's impossible to import datetime_or_None directly from MySQLdb.times
parse_datetime = conversions[FIELD_TYPE.DATETIME]
def parse_datetime_with_timezone_support(value):
dt = parse_datetime(value)
# Confirm that dt is naive before overwriting its tzinfo.
if dt is not None and settings.USE_TZ and timezone.is_naive(dt):
dt = dt.replace(tzinfo=timezone.utc)
return dt
def adapt_datetime_with_timezone_support(value, conv):
# Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
if settings.USE_TZ:
if timezone.is_naive(value):
warnings.warn("MySQL received a naive datetime (%s)"
" while time zone support is active." % value,
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
value = value.astimezone(timezone.utc).replace(tzinfo=None)
return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S"), conv)
# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
# timedelta in terms of actual behavior as they are signed and include days --
# and Django expects time, so we still need to override that. We also need to
# add special handling for SafeText and SafeBytes as MySQLdb's type
# checking is too tight to catch those (see Django ticket #6052).
# Finally, MySQLdb always returns naive datetime objects. However, when
# timezone support is active, Django expects timezone-aware datetime objects.
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: backend_utils.typecast_time,
FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal,
FIELD_TYPE.DATETIME: parse_datetime_with_timezone_support,
datetime.datetime: adapt_datetime_with_timezone_support,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same). Based on the list of versions
# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
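# For instance (hypothetical version string), server_version_re.match('5.5.40-0ubuntu0.12.04.1')
# returns groups ('5', '5', '40'), which mysql_version below turns into (5, 5, 40).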
# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is set up when the connection is created, and the
# standard backend_utils.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
class CursorWrapper(object):
"""
A thin wrapper around MySQLdb's normal cursor class so that we can catch
particular exception instances and reraise them with the right types.
Implemented as a wrapper, rather than a subclass, so that we aren't stuck
to the particular underlying representation returned by Connection.cursor().
"""
codes_for_integrityerror = (1048,)
def __init__(self, cursor):
self.cursor = cursor
def execute(self, query, args=None):
try:
# args is None means no string interpolation
return self.cursor.execute(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, args):
try:
return self.cursor.executemany(query, args)
except Database.OperationalError as e:
# Map some error codes to IntegrityError, since they seem to be
# misclassified and Django would prefer the more logical place.
if e.args[0] in self.codes_for_integrityerror:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return iter(self.cursor)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
# Ticket #17671 - Close instead of passing thru to avoid backend
# specific behavior.
self.close()
class DatabaseFeatures(BaseDatabaseFeatures):
empty_fetchmany_value = ()
update_can_self_select = False
allows_group_by_pk = True
related_fields_match_type = True
allow_sliced_subqueries = False
has_bulk_insert = True
has_select_for_update = True
has_select_for_update_nowait = False
supports_forward_references = False
# XXX MySQL DB-API drivers currently fail on binary data on Python 3.
supports_binary_field = six.PY2
supports_microsecond_precision = False
supports_regex_backreferencing = False
supports_date_lookup_using_string = False
can_introspect_binary_field = False
can_introspect_boolean_field = False
supports_timezones = False
requires_explicit_null_ordering_when_grouping = True
allows_auto_pk_0 = False
uses_savepoints = True
atomic_transactions = False
supports_column_check_constraints = False
def __init__(self, connection):
super(DatabaseFeatures, self).__init__(connection)
@cached_property
def _mysql_storage_engine(self):
"Internal method used in Django tests. Don't rely on this from your code"
with self.connection.cursor() as cursor:
cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)')
# This command is MySQL specific; the second column
# will tell you the default table type of the created
# table. Since all Django's test tables will have the same
# table type, that's enough to evaluate the feature.
cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'")
result = cursor.fetchone()
cursor.execute('DROP TABLE INTROSPECT_TEST')
return result[1]
@cached_property
def can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
return self._mysql_storage_engine != 'MyISAM'
@cached_property
def has_zoneinfo_database(self):
# MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects
# abbreviations (eg. EAT). When pytz isn't installed and the current
# time zone is LocalTimezone (the only sensible value in this
# context), the current time zone name will be an abbreviation. As a
# consequence, MySQL cannot perform time zone conversions reliably.
if pytz is None:
return False
# Test if the time zone definitions are installed.
with self.connection.cursor() as cursor:
cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
return cursor.fetchone() is not None
class DatabaseOperations(BaseDatabaseOperations):
compiler_module = "django.db.backends.mysql.compiler"
# MySQL stores positive fields as UNSIGNED ints.
integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges,
PositiveSmallIntegerField=(0, 4294967295),
PositiveIntegerField=(0, 18446744073709551615),
)
def date_extract_sql(self, lookup_type, field_name):
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
if lookup_type == 'week_day':
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
# Note: WEEKDAY() returns 0-6, Monday=0.
return "DAYOFWEEK(%s)" % field_name
else:
return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
def date_trunc_sql(self, lookup_type, field_name):
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
try:
i = fields.index(lookup_type) + 1
except ValueError:
sql = field_name
else:
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
return sql
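    # Worked example (hypothetical column name): lookup_type='month' selects the
    # format string '%%Y-%%m-01 00:00:00', so the SQL becomes
    # "CAST(DATE_FORMAT(mycol, '%%Y-%%m-01 00:00:00') AS DATETIME)"; the doubled
    # percent signs are unescaped later by the DB-API parameter layer.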
def datetime_extract_sql(self, lookup_type, field_name, tzname):
if settings.USE_TZ:
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
params = [tzname]
else:
params = []
# http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
if lookup_type == 'week_day':
# DAYOFWEEK() returns an integer, 1-7, Sunday=1.
# Note: WEEKDAY() returns 0-6, Monday=0.
sql = "DAYOFWEEK(%s)" % field_name
else:
sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
return sql, params
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
if settings.USE_TZ:
field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
params = [tzname]
else:
params = []
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s') # Use double percents to escape.
format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
try:
i = fields.index(lookup_type) + 1
except ValueError:
sql = field_name
else:
format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
return sql, params
def date_interval_sql(self, sql, connector, timedelta):
return "(%s %s INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND)" % (sql, connector,
timedelta.days, timedelta.seconds, timedelta.microseconds)
def drop_foreignkey_sql(self):
return "DROP FOREIGN KEY"
def force_no_ordering(self):
"""
"ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
columns. If no ordering would otherwise be applied, we don't want any
implicit sorting going on.
"""
return ["NULL"]
def fulltext_search_sql(self, field_name):
return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
def last_executed_query(self, cursor, sql, params):
# With MySQLdb, cursor objects have an (undocumented) "_last_executed"
# attribute where the exact query sent to the database is saved.
# See MySQLdb/cursors.py in the source distribution.
return force_text(getattr(cursor, '_last_executed', None), errors='replace')
def no_limit_value(self):
# 2**64 - 1, as recommended by the MySQL documentation
return 18446744073709551615
def quote_name(self, name):
if name.startswith("`") and name.endswith("`"):
return name # Quoting once is enough.
return "`%s`" % name
def random_function_sql(self):
return 'RAND()'
def sql_flush(self, style, tables, sequences, allow_cascade=False):
# NB: The generated SQL below is specific to MySQL
# 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
# to clear all tables of all data
if tables:
sql = ['SET FOREIGN_KEY_CHECKS = 0;']
for table in tables:
sql.append('%s %s;' % (
style.SQL_KEYWORD('TRUNCATE'),
style.SQL_FIELD(self.quote_name(table)),
))
sql.append('SET FOREIGN_KEY_CHECKS = 1;')
sql.extend(self.sequence_reset_by_name_sql(style, sequences))
return sql
else:
return []
def sequence_reset_by_name_sql(self, style, sequences):
# Truncate already resets the AUTO_INCREMENT field from
# MySQL version 5.0.13 onwards. Refs #16961.
if self.connection.mysql_version < (5, 0, 13):
return [
"%s %s %s %s %s;" % (
style.SQL_KEYWORD('ALTER'),
style.SQL_KEYWORD('TABLE'),
style.SQL_TABLE(self.quote_name(sequence['table'])),
style.SQL_KEYWORD('AUTO_INCREMENT'),
style.SQL_FIELD('= 1'),
) for sequence in sequences
]
else:
return []
def validate_autopk_value(self, value):
# MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
if value == 0:
raise ValueError('The database backend does not accept 0 as a '
'value for AutoField.')
return value
def value_to_db_datetime(self, value):
if value is None:
return None
# MySQL doesn't support tz-aware datetimes
if timezone.is_aware(value):
if settings.USE_TZ:
value = value.astimezone(timezone.utc).replace(tzinfo=None)
else:
raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
# MySQL doesn't support microseconds
return six.text_type(value.replace(microsecond=0))
def value_to_db_time(self, value):
if value is None:
return None
# MySQL doesn't support tz-aware times
if timezone.is_aware(value):
raise ValueError("MySQL backend does not support timezone-aware times.")
# MySQL doesn't support microseconds
return six.text_type(value.replace(microsecond=0))
def year_lookup_bounds_for_datetime_field(self, value):
# Again, no microseconds
first, second = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value)
return [first.replace(microsecond=0), second.replace(microsecond=0)]
def max_name_length(self):
return 64
def bulk_insert_sql(self, fields, num_values):
items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
return "VALUES " + ", ".join([items_sql] * num_values)
def combine_expression(self, connector, sub_expressions):
"""
MySQL requires special cases for ^ operators in query expressions
"""
if connector == '^':
return 'POW(%s)' % ','.join(sub_expressions)
return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'mysql'
operators = {
'exact': '= %s',
'iexact': 'LIKE %s',
'contains': 'LIKE BINARY %s',
'icontains': 'LIKE %s',
'regex': 'REGEXP BINARY %s',
'iregex': 'REGEXP %s',
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': 'LIKE BINARY %s',
'endswith': 'LIKE BINARY %s',
'istartswith': 'LIKE %s',
'iendswith': 'LIKE %s',
}
Database = Database
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = DatabaseValidation(self)
def get_connection_params(self):
kwargs = {
'conv': django_conversions,
'charset': 'utf8',
}
if six.PY2:
kwargs['use_unicode'] = True
settings_dict = self.settings_dict
if settings_dict['USER']:
kwargs['user'] = settings_dict['USER']
if settings_dict['NAME']:
kwargs['db'] = settings_dict['NAME']
if settings_dict['PASSWORD']:
kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
if settings_dict['HOST'].startswith('/'):
kwargs['unix_socket'] = settings_dict['HOST']
elif settings_dict['HOST']:
kwargs['host'] = settings_dict['HOST']
if settings_dict['PORT']:
kwargs['port'] = int(settings_dict['PORT'])
# We need the number of potentially affected rows after an
# "UPDATE", not the number of changed rows.
kwargs['client_flag'] = CLIENT.FOUND_ROWS
kwargs.update(settings_dict['OPTIONS'])
return kwargs
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.encoders[SafeText] = conn.encoders[six.text_type]
conn.encoders[SafeBytes] = conn.encoders[bytes]
return conn
def init_connection_state(self):
with self.cursor() as cursor:
# SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column
# on a recently-inserted row will return when the field is tested for
# NULL. Disabling this value brings this aspect of MySQL in line with
# SQL standards.
cursor.execute('SET SQL_AUTO_IS_NULL = 0')
def create_cursor(self):
cursor = self.connection.cursor()
return CursorWrapper(cursor)
def _rollback(self):
try:
BaseDatabaseWrapper._rollback(self)
except Database.NotSupportedError:
pass
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit(autocommit)
def disable_constraint_checking(self):
"""
Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
to indicate constraint checks need to be re-enabled.
"""
self.cursor().execute('SET foreign_key_checks=0')
return True
def enable_constraint_checking(self):
"""
Re-enable foreign key checks after they have been disabled.
"""
# Override needs_rollback in case constraint_checks_disabled is
# nested inside transaction.atomic.
self.needs_rollback, needs_rollback = False, self.needs_rollback
try:
self.cursor().execute('SET foreign_key_checks=1')
finally:
self.needs_rollback = needs_rollback
def check_constraints(self, table_names=None):
"""
Checks each table name in `table_names` for rows with invalid foreign key references. This method is
intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint checks were off.
Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
detailed information about the invalid reference in the error message.
Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
ALL IMMEDIATE")
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute("""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
% (primary_key_column_name, column_name, table_name, referenced_table_name,
column_name, referenced_column_name, column_name, referenced_column_name))
for bad_row in cursor.fetchall():
raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
"foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
% (table_name, bad_row[0],
table_name, column_name, bad_row[1],
referenced_table_name, referenced_column_name))
def schema_editor(self, *args, **kwargs):
"Returns a new instance of this backend's SchemaEditor"
return DatabaseSchemaEditor(self, *args, **kwargs)
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def mysql_version(self):
with self.temporary_connection():
server_info = self.connection.get_server_info()
match = server_version_re.match(server_info)
if not match:
raise Exception('Unable to determine MySQL version from version string %r' % server_info)
return tuple(int(x) for x in match.groups())
| dhoffman34/django | django/db/backends/mysql/base.py | Python | bsd-3-clause | 23,595 | 0.00267 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_foxtabs
----------------------------------
Tests for `foxtabs` module.
"""
import unittest
from foxtabs import foxtabs
class TestFoxtabs(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | gjost/foxtabs | tests/test_foxtabs.py | Python | bsd-3-clause | 382 | 0.005236 |
""" while case """
x = 1
while x<10:
print(x)
x = x + 1
""" while and else case """
x = 1
while x<10:
print(x)
x = x + 1
else:
print("ok")
""" while and else break case """
x = 1
while x<10:
print(x)
x = x + 1
break
else:
print("ok")
| naitoh/py2rb | tests/basic/while.py | Python | mit | 271 | 0.01107 |
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class ModeldbUpdateDatasetDescriptionResponse(BaseType):
def __init__(self, dataset=None):
required = {
"dataset": False,
}
self.dataset = dataset
for k, v in required.items():
if self[k] is None and v:
raise ValueError('attribute {} is required'.format(k))
@staticmethod
def from_json(d):
from .ModeldbDataset import ModeldbDataset
tmp = d.get('dataset', None)
if tmp is not None:
d['dataset'] = ModeldbDataset.from_json(tmp)
return ModeldbUpdateDatasetDescriptionResponse(**d)
| mitdbg/modeldb | client/verta/verta/_swagger/_public/modeldb/model/ModeldbUpdateDatasetDescriptionResponse.py | Python | mit | 638 | 0.010972 |
"""Microsoft Internet Explorer cookie loading on Windows.
Copyright 2002-2003 Johnny Lee <[email protected]> (MSIE Perl code)
Copyright 2002-2006 John J Lee <[email protected]> (The Python port)
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
# XXX names and comments are not great here
import os, re, time, struct, logging
if os.name == "nt":
import _winreg
from _clientcookie import FileCookieJar, CookieJar, Cookie, \
MISSING_FILENAME_TEXT, LoadError
debug = logging.getLogger("mechanize").debug
def regload(path, leaf):
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0,
_winreg.KEY_ALL_ACCESS)
try:
value = _winreg.QueryValueEx(key, leaf)[0]
except WindowsError:
value = None
return value
WIN32_EPOCH = 0x019db1ded53e8000L # 1970 Jan 01 00:00:00 in Win32 FILETIME
def epoch_time_offset_from_win32_filetime(filetime):
"""Convert from win32 filetime to seconds-since-epoch value.
MSIE stores create and expire times as Win32 FILETIME, which is 64
bits of 100 nanosecond intervals since Jan 01 1601.
mechanize expects time in 32-bit value expressed in seconds since the
epoch (Jan 01 1970).
"""
if filetime < WIN32_EPOCH:
raise ValueError("filetime (%d) is before epoch (%d)" %
(filetime, WIN32_EPOCH))
return divmod((filetime - WIN32_EPOCH), 10000000L)[0]
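# Rough example (synthetic value): WIN32_EPOCH + 10000000 is one second after
# 1970-01-01, so epoch_time_offset_from_win32_filetime(WIN32_EPOCH + 10000000)
# returns 1.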
def binary_to_char(c): return "%02X" % ord(c)
def binary_to_str(d): return "".join(map(binary_to_char, list(d)))
class MSIEBase:
magic_re = re.compile(r"Client UrlCache MMF Ver \d\.\d.*")
padding = "\x0d\xf0\xad\x0b"
msie_domain_re = re.compile(r"^([^/]+)(/.*)$")
cookie_re = re.compile("Cookie\:.+\@([\x21-\xFF]+).*?"
"(.+\@[\x21-\xFF]+\.txt)")
# path under HKEY_CURRENT_USER from which to get location of index.dat
reg_path = r"software\microsoft\windows" \
r"\currentversion\explorer\shell folders"
reg_key = "Cookies"
def __init__(self):
self._delayload_domains = {}
def _delayload_domain(self, domain):
# if necessary, lazily load cookies for this domain
delayload_info = self._delayload_domains.get(domain)
if delayload_info is not None:
cookie_file, ignore_discard, ignore_expires = delayload_info
try:
self.load_cookie_data(cookie_file,
ignore_discard, ignore_expires)
except (LoadError, IOError):
debug("error reading cookie file, skipping: %s", cookie_file)
else:
del self._delayload_domains[domain]
def _load_cookies_from_file(self, filename):
debug("Loading MSIE cookies file: %s", filename)
cookies = []
cookies_fh = open(filename)
try:
while 1:
key = cookies_fh.readline()
if key == "": break
rl = cookies_fh.readline
def getlong(rl=rl): return long(rl().rstrip())
def getstr(rl=rl): return rl().rstrip()
key = key.rstrip()
value = getstr()
domain_path = getstr()
flags = getlong() # 0x2000 bit is for secure I think
lo_expire = getlong()
hi_expire = getlong()
lo_create = getlong()
hi_create = getlong()
sep = getstr()
if "" in (key, value, domain_path, flags, hi_expire, lo_expire,
hi_create, lo_create, sep) or (sep != "*"):
break
m = self.msie_domain_re.search(domain_path)
if m:
domain = m.group(1)
path = m.group(2)
cookies.append({"KEY": key, "VALUE": value,
"DOMAIN": domain, "PATH": path,
"FLAGS": flags, "HIXP": hi_expire,
"LOXP": lo_expire, "HICREATE": hi_create,
"LOCREATE": lo_create})
finally:
cookies_fh.close()
return cookies
def load_cookie_data(self, filename,
ignore_discard=False, ignore_expires=False):
"""Load cookies from file containing actual cookie data.
Old cookies are kept unless overwritten by newly loaded ones.
You should not call this method if the delayload attribute is set.
        I think each of these files contains all cookies for one user, domain,
and path.
filename: file containing cookies -- usually found in a file like
C:\WINNT\Profiles\joe\Cookies\joe@blah[1].txt
"""
now = int(time.time())
cookie_data = self._load_cookies_from_file(filename)
for cookie in cookie_data:
flags = cookie["FLAGS"]
secure = ((flags & 0x2000) != 0)
filetime = (cookie["HIXP"] << 32) + cookie["LOXP"]
expires = epoch_time_offset_from_win32_filetime(filetime)
if expires < now:
discard = True
else:
discard = False
domain = cookie["DOMAIN"]
initial_dot = domain.startswith(".")
if initial_dot:
domain_specified = True
else:
# MSIE 5 does not record whether the domain cookie-attribute
# was specified.
# Assuming it wasn't is conservative, because with strict
# domain matching this will match less frequently; with regular
# Netscape tail-matching, this will match at exactly the same
# times that domain_specified = True would. It also means we
# don't have to prepend a dot to achieve consistency with our
# own & Mozilla's domain-munging scheme.
domain_specified = False
# assume path_specified is false
# XXX is there other stuff in here? -- e.g. comment, commentURL?
c = Cookie(0,
cookie["KEY"], cookie["VALUE"],
None, False,
domain, domain_specified, initial_dot,
cookie["PATH"], False,
secure,
expires,
discard,
None,
None,
{"flags": flags})
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
CookieJar.set_cookie(self, c)
def load_from_registry(self, ignore_discard=False, ignore_expires=False,
username=None):
"""
username: only required on win9x
"""
cookies_dir = regload(self.reg_path, self.reg_key)
filename = os.path.normpath(os.path.join(cookies_dir, "INDEX.DAT"))
self.load(filename, ignore_discard, ignore_expires, username)
def _really_load(self, index, filename, ignore_discard, ignore_expires,
username):
now = int(time.time())
if username is None:
username = os.environ['USERNAME'].lower()
cookie_dir = os.path.dirname(filename)
data = index.read(256)
if len(data) != 256:
raise LoadError("%s file is too short" % filename)
# Cookies' index.dat file starts with 32 bytes of signature
# followed by an offset to the first record, stored as a little-
# endian DWORD.
sig, size, data = data[:32], data[32:36], data[36:]
size = struct.unpack("<L", size)[0]
# check that sig is valid
if not self.magic_re.match(sig) or size != 0x4000:
raise LoadError("%s ['%s' %s] does not seem to contain cookies" %
(str(filename), sig, size))
# skip to start of first record
index.seek(size, 0)
sector = 128 # size of sector in bytes
while 1:
data = ""
# Cookies are usually in two contiguous sectors, so read in two
# sectors and adjust if not a Cookie.
to_read = 2 * sector
d = index.read(to_read)
if len(d) != to_read:
break
data = data + d
# Each record starts with a 4-byte signature and a count
# (little-endian DWORD) of sectors for the record.
sig, size, data = data[:4], data[4:8], data[8:]
size = struct.unpack("<L", size)[0]
to_read = (size - 2) * sector
## from urllib import quote
## print "data", quote(data)
## print "sig", quote(sig)
## print "size in sectors", size
## print "size in bytes", size*sector
## print "size in units of 16 bytes", (size*sector) / 16
## print "size to read in bytes", to_read
## print
if sig != "URL ":
assert sig in ("HASH", "LEAK", \
self.padding, "\x00\x00\x00\x00"), \
"unrecognized MSIE index.dat record: %s" % \
binary_to_str(sig)
if sig == "\x00\x00\x00\x00":
# assume we've got all the cookies, and stop
break
if sig == self.padding:
continue
# skip the rest of this record
assert to_read >= 0
if size != 2:
assert to_read != 0
index.seek(to_read, 1)
continue
# read in rest of record if necessary
if size > 2:
more_data = index.read(to_read)
if len(more_data) != to_read: break
data = data + more_data
cookie_re = ("Cookie\:%s\@([\x21-\xFF]+).*?" % username +
"(%s\@[\x21-\xFF]+\.txt)" % username)
m = re.search(cookie_re, data, re.I)
if m:
cookie_file = os.path.join(cookie_dir, m.group(2))
if not self.delayload:
try:
self.load_cookie_data(cookie_file,
ignore_discard, ignore_expires)
except (LoadError, IOError):
debug("error reading cookie file, skipping: %s",
cookie_file)
else:
domain = m.group(1)
i = domain.find("/")
if i != -1:
domain = domain[:i]
self._delayload_domains[domain] = (
cookie_file, ignore_discard, ignore_expires)
class MSIECookieJar(MSIEBase, FileCookieJar):
"""FileCookieJar that reads from the Windows MSIE cookies database.
MSIECookieJar can read the cookie files of Microsoft Internet Explorer
(MSIE) for Windows version 5 on Windows NT and version 6 on Windows XP and
Windows 98. Other configurations may also work, but are untested. Saving
cookies in MSIE format is NOT supported. If you save cookies, they'll be
in the usual Set-Cookie3 format, which you can read back in using an
instance of the plain old CookieJar class. Don't save using the same
filename that you loaded cookies from, because you may succeed in
clobbering your MSIE cookies index file!
    You should be able to have mechanize share Internet Explorer's cookies like
this (note you need to supply a username to load_from_registry if you're on
Windows 9x or Windows ME):
cj = MSIECookieJar(delayload=1)
# find cookies index file in registry and load cookies from it
cj.load_from_registry()
opener = mechanize.build_opener(mechanize.HTTPCookieProcessor(cj))
response = opener.open("http://example.com/")
Iterating over a delayloaded MSIECookieJar instance will not cause any
cookies to be read from disk. To force reading of all cookies from disk,
call read_all_cookies. Note that the following methods iterate over self:
clear_temporary_cookies, clear_expired_cookies, __len__, __repr__, __str__
and as_string.
Additional methods:
load_from_registry(ignore_discard=False, ignore_expires=False,
username=None)
load_cookie_data(filename, ignore_discard=False, ignore_expires=False)
read_all_cookies()
"""
def __init__(self, filename=None, delayload=False, policy=None):
MSIEBase.__init__(self)
FileCookieJar.__init__(self, filename, delayload, policy)
def set_cookie(self, cookie):
if self.delayload:
self._delayload_domain(cookie.domain)
CookieJar.set_cookie(self, cookie)
def _cookies_for_request(self, request):
"""Return a list of cookies to be returned to server."""
domains = self._cookies.copy()
domains.update(self._delayload_domains)
domains = domains.keys()
cookies = []
for domain in domains:
cookies.extend(self._cookies_for_domain(domain, request))
return cookies
def _cookies_for_domain(self, domain, request):
if not self._policy.domain_return_ok(domain, request):
return []
debug("Checking %s for cookies to return", domain)
if self.delayload:
self._delayload_domain(domain)
return CookieJar._cookies_for_domain(self, domain, request)
def read_all_cookies(self):
"""Eagerly read in all cookies."""
if self.delayload:
for domain in self._delayload_domains.keys():
self._delayload_domain(domain)
def load(self, filename, ignore_discard=False, ignore_expires=False,
username=None):
"""Load cookies from an MSIE 'index.dat' cookies index file.
filename: full path to cookie index file
username: only required on win9x
"""
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
index = open(filename, "rb")
try:
self._really_load(index, filename, ignore_discard, ignore_expires,
username)
finally:
index.close()
| openhatch/oh-mainline | vendor/packages/mechanize/mechanize/_msiecookiejar.py | Python | agpl-3.0 | 14,694 | 0.002178 |
# -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,time
from resources.lib.libraries import client
from resources.lib.libraries import captcha
def resolve(url):
try:
result = client.request(url)
if '>File Not Found<' in result: raise Exception()
post = {}
f = client.parseDOM(result, 'Form', attrs = {'action': ''})
k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
post.update({'method_free': 'Free Download'})
post = urllib.urlencode(post)
result = client.request(url, post=post, close=False)
post = {}
f = client.parseDOM(result, 'Form', attrs = {'action': '' })
k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
post.update({'method_free': 'Free Download'})
try: post.update(captcha.request(result))
except: pass
post = urllib.urlencode(post)
for i in range(0, 10):
try:
result = client.request(url, post=post, close=False)
if not '>File Download Link Generated<' in result: raise Exception()
except:
time.sleep(1)
url = client.parseDOM(result, 'a', ret='onClick')
url = [i for i in url if i.startswith('window.open')][0]
url = re.compile('[\'|\"](.+?)[\'|\"]').findall(url)[0]
return url
except:
return
| freeworldxbmc/pluging.video.Jurassic.World.Media | resources/lib/resolvers/clicknupload.py | Python | gpl-3.0 | 2,319 | 0.012937 |
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class UsedSalesOrder(Document):
pass
| aruizramon/alec_erpnext | erpnext/selling/doctype/used_sales_order/used_sales_order.py | Python | agpl-3.0 | 276 | 0.01087 |
from __future__ import unicode_literals
from django.db import models
from django.forms import ModelForm
from django.contrib.auth.models import User
from boissons.models import boissons
# Create your models here.
class consommateurs(models.Model):
def __str__(self):
return "%s" % (self.user.username)
user = models.OneToOneField(User, on_delete=models.CASCADE)
activation_key = models.CharField(max_length=60)
key_expires = models.DateTimeField()
class consommation(models.Model):
def __str__(self):
manuel = "MANUEL" if self.manuel else ""
return "%s %s %s %s" % (self.date.strftime("%F"),self.consommateur.user.username,self.boisson.name,manuel)
date = models.DateField(auto_now_add=True)
consommateur = models.ForeignKey('consommateurs',on_delete=models.PROTECT)
boisson = models.ForeignKey('boissons.boissons',on_delete=models.PROTECT)
manuel = models.BooleanField(default=True)
class ConsommateursForm(ModelForm):
class Meta:
model = User
fields = ('username', 'email', 'password')
| jadjay/GestionBoissonDj | gestionboisson/consommateurs/models.py | Python | gpl-3.0 | 1,011 | 0.023739 |
# This is modified from a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
import array
import struct
import zlib
from enum import Enum
from pkg_resources import parse_version
from kaitaistruct import __version__ as ks_version, KaitaiStruct, KaitaiStream, BytesIO
if parse_version(ks_version) < parse_version('0.7'):
raise Exception(
"Incompatible Kaitai Struct Python API: 0.7 or later is required, but you have %s" % (ks_version))
from .cfg_2 import Cfg2
from .header import Header
from .data import Data
from .cfg_3 import Cfg3
from .command import Command
def _kaitai_repr(self):
_repr_list = []
for item in vars(self):
if not item.startswith('_'):
_r = getattr(self, item)
if type(_r) in (int, float, str, bytes, bool):
_repr_list.append("=".join((item, _r.__repr__())))
else:
_repr_list.append(item)
return "<" + self.__class__.__name__ + " |" + ", ".join(_repr_list) + ">"
def _enum_repr(self):
_repr_list = []
for item in ("name", "value"):
_r = getattr(self, item)
_repr_list.append("=".join((item, _r.__repr__())))
return "<" + self.__class__.__name__[:-4] + " |" + ", ".join(_repr_list) + ">"
def _kaitai_show(self, parent_path=' '):
if type(self) in (int, float, str, bytes, bool):
print(" == ".join((parent_path, self.__repr__())))
elif type(self) == list:
for i, item in enumerate(self):
try:
item.show('{}[{}]'.format(parent_path,i))
except:
_kaitai_show(item,'{}[{}]'.format(parent_path,i))
else:
for item in sorted(vars(self)):
if not item.startswith('_'):
_r = getattr(self, item)
try:
_r.show(parent_path+'.'+item)
except:
_kaitai_show(_r,parent_path+'.'+item)
def _enum_show(self, parent_path=' '):
for item in ("name", "value"):
_r = getattr(self, item)
print(parent_path+'.'+item+' == '+_r.__repr__())
KaitaiStruct.__repr__ = _kaitai_repr
Enum.__repr__ = _enum_repr
KaitaiStruct.show = _kaitai_show
Enum.show = _enum_show
#msg.show()
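# Hypothetical usage sketch (the frame bytes and the mini-config cache below are
# assumptions, not defined in this module):
#   msg = PhasorMessage(KaitaiStream(BytesIO(frame_bytes)), _mini_cfgs=cfg_cache)
#   msg.show()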
class PhasorMessage(KaitaiStruct):
def __repr__(self):
_repr_list = [
"time=" + str(self.time)] if self.fracsec.fraction_of_second else []
for item in vars(self):
if not item.startswith('_'):
_r = getattr(self, item)
if type(_r) in (int, float, str, bytes):
_repr_list.append("=".join((item, _r.__repr__())))
else:
_repr_list.append(item)
return "<" + self.__class__.__name__ + " |" + ", ".join(_repr_list) + ">"
def show(self, parent_path=' '):
if self.fracsec.fraction_of_second:
print(parent_path+'.time == '+str(self.time))
_kaitai_show(self, parent_path)
def __init__(self, _io, _parent=None, _root=None, _mini_cfgs=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._pkt_pos = self._io.pos()
self.sync = self._root.SyncWord(self._io, self, self._root)
self.framesize = self._io.read_u2be()
self.idcode = self._io.read_u2be()
self._mini_cfg = _mini_cfgs.mini_cfg[self.idcode]
self.soc = self._io.read_u4be()
self.fracsec = self._root.Fracsec(self._io, self, self._root,
self._mini_cfg.time_base.time_base if self._mini_cfg else None)
_on = self.sync.frame_type.value
if _on == 0:
if self._mini_cfg:
self.data = Data(self._io, _mini_cfg=self._mini_cfg)
else:
self.data = self._io.read_bytes((self.framesize - 16))
elif _on == 3:
self._raw_data = self._io.read_bytes((self.framesize - 16))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Cfg2(io)
_mini_cfgs.add_cfg(self.idcode, self.data)
elif _on == 4:
self._raw_data = self._io.read_bytes((self.framesize - 16))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Command(io)
elif _on == 5:
_mini_cfgs.add_cfg(self.raw_pkt)
self._raw_data = self._io.read_bytes((self.framesize - 16))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Cfg3(io)
elif _on == 2:
self._raw_data = self._io.read_bytes((self.framesize - 16))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Cfg2(io)
elif _on == 1:
self._raw_data = self._io.read_bytes((self.framesize - 16))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Header(io)
self.chk = self._io.read_u2be()
class SyncWord(KaitaiStruct):
class FrameTypeEnum(Enum):
data = 0
header = 1
cfg1 = 2
cfg2 = 3
cmd = 4
cfg3 = 5
class VersionNumberEnum(Enum):
c_37_118_2005 = 1
c_37_118_2_2011 = 2
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.ensure_fixed_contents(struct.pack('1b', -86))
self.reserved = self._io.read_bits_int(1) != 0
self.frame_type = self._root.SyncWord.FrameTypeEnum(
self._io.read_bits_int(3))
self.version_number = self._root.SyncWord.VersionNumberEnum(
self._io.read_bits_int(4))
class Fracsec(KaitaiStruct):
def __repr__(self):
_repr_list = ["fraction_of_second=" +
str(self.fraction_of_second)] if self.fraction_of_second else []
for item in vars(self):
if not item.startswith('_'):
_r = getattr(self, item)
if type(_r) in (int, float, str):
_repr_list.append("=".join((item, _r.__repr__())))
else:
_repr_list.append(item)
return "<" + self.__class__.__name__ + " |" + ", ".join(_repr_list) + ">"
def show(self, parent_path):
if self.fraction_of_second:
print(parent_path+'.fraction_of_second == ' + str(self.fraction_of_second))
_kaitai_show(self, parent_path)
class LeapSecondDirectionEnum(Enum):
add = 0
delete = 1
class MsgTqEnum(Enum):
normal_operation_clock_locked_to_utc_traceable_source = 0
time_within_10_to_9_s_of_utc = 1
time_within_10_to_8_s_of_utc = 2
time_within_10_to_7_s_of_utc = 3
time_within_10_to_6_s_of_utc = 4
time_within_10_to_5_s_of_utc = 5
time_within_10_to_4_s_of_utc = 6
time_within_10_to_3_s_of_utc = 7
time_within_10_to_2_s_of_utc = 8
time_within_10_to_1_s_of_utc = 9
time_within_1_s_of_utc = 10
time_within_10_s_of_utc = 11
fault_clock_failure_time_not_reliable = 15
def __init__(self, _io, _parent=None, _root=None, _time_base=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._time_base = _time_base
self.reserved = self._io.read_bits_int(1) != 0
self.leap_second_direction = self._root.Fracsec.LeapSecondDirectionEnum(
self._io.read_bits_int(1))
self.leap_second_occurred = self._io.read_bits_int(1) != 0
self.leap_second_pending = self._io.read_bits_int(1) != 0
self.time_quality = self._root.Fracsec.MsgTqEnum(
self._io.read_bits_int(4))
self.raw_fraction_of_second = self._io.read_bits_int(24)
@property
def fraction_of_second(self):
if hasattr(self, '_m_fraction_of_second'):
return self._m_fraction_of_second if hasattr(self, '_m_fraction_of_second') else None
if self._time_base:
self._m_fraction_of_second = self.raw_fraction_of_second / self._time_base
return self._m_fraction_of_second if hasattr(self, '_m_fraction_of_second') else None
@property
def time(self):
if hasattr(self, '_m_time'):
return self._m_time if hasattr(self, '_m_time') else None
self._m_time = self.soc + self.fracsec.fraction_of_second
return self._m_time if hasattr(self, '_m_time') else None
@property
def chk_body(self):
if hasattr(self, '_m_chk_body'):
return self._m_chk_body if hasattr(self, '_m_chk_body') else None
_pos = self._io.pos()
self._io.seek(0)
self._m_chk_body = self._io.read_bytes((self.framesize - 2))
self._io.seek(_pos)
return self._m_chk_body if hasattr(self, '_m_chk_body') else None
@property
def raw_pkt(self):
if hasattr(self, '_m_pkt'):
return self._m_pkt if hasattr(self, '_m_pkt') else None
_pos = self._io.pos()
self._io.seek(self._pkt_pos)
self._m_pkt = self._io.read_bytes(self.framesize)
self._io.seek(_pos)
return self._m_pkt if hasattr(self, '_m_pkt') else None
| sonusz/PhasorToolBox | phasortoolbox/parser/common.py | Python | mit | 9,531 | 0.002833 |
class MinStack:
def __init__(self):
"""
initialize your data structure here.
"""
from collections import deque
self.stack = deque()
self.stack.append((None, float('inf')))
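        # Each entry is a (value, running minimum) pair; the sentinel above keeps
        # getMin() well-defined before the first push.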
def push(self, x: int) -> None:
self.stack.append((x, min(x, self.stack[-1][1])))
def pop(self) -> None:
return self.stack.pop()[0]
def top(self) -> int:
return self.stack[-1][0]
def getMin(self) -> int:
return self.stack[-1][1]
# Your MinStack object will be instantiated and called as such:
# obj = MinStack()
# obj.push(x)
# obj.pop()
# param_3 = obj.top()
# param_4 = obj.getMin()
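# Illustrative trace (hypothetical values):
#   obj.push(-2); obj.push(0); obj.push(-3)
#   obj.getMin()  # -3
#   obj.pop(); obj.top()  # 0
#   obj.getMin()  # -2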
| saisankargochhayat/algo_quest | leetcode/155. Min Stack/soln.py | Python | apache-2.0 | 664 | 0 |
# Copyright 2015-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""BGP Attribute MP_UNREACH_NLRI
"""
import struct
from yabgp.message.attribute import Attribute
from yabgp.message.attribute import AttributeFlag
from yabgp.message.attribute import AttributeID
from yabgp.message.attribute.nlri.ipv4_mpls_vpn import IPv4MPLSVPN
from yabgp.message.attribute.nlri.ipv6_mpls_vpn import IPv6MPLSVPN
from yabgp.message.attribute.nlri.ipv4_flowspec import IPv4FlowSpec
from yabgp.message.attribute.nlri.ipv6_unicast import IPv6Unicast
from yabgp.message.attribute.nlri.labeled_unicast.ipv4 import IPv4LabeledUnicast
from yabgp.message.attribute.nlri.evpn import EVPN
from yabgp.message.attribute.nlri.linkstate import BGPLS
from yabgp.message.attribute.nlri.ipv4_srte import IPv4SRTE
from yabgp.common import afn
from yabgp.common import safn
from yabgp.common import exception as excep
from yabgp.common import constants as bgp_cons
class MpUnReachNLRI(Attribute):
"""
This is an optional non-transitive attribute that can be used for the
purpose of withdrawing multiple unfeasible routes from service.
An UPDATE message that contains the MP_UNREACH_NLRI is not required
to carry any other path attributes.
MP_UNREACH_NLRI coding information
+---------------------------------------------------------+
| Address Family Identifier (2 octets) |
+---------------------------------------------------------+
| Subsequent Address Family Identifier (1 octet) |
+---------------------------------------------------------+
| Withdrawn Routes (variable) |
+---------------------------------------------------------+
"""
ID = AttributeID.MP_UNREACH_NLRI
FLAG = AttributeFlag.OPTIONAL + AttributeFlag.EXTENDED_LENGTH
@classmethod
def parse(cls, value):
try:
afi, safi = struct.unpack('!HB', value[0:3])
except Exception:
raise excep.UpdateMessageError(sub_error=bgp_cons.ERR_MSG_UPDATE_ATTR_LEN,
data='')
nlri_bin = value[3:]
# for IPv4
if afi == afn.AFNUM_INET:
# VPNv4
if safi == safn.SAFNUM_LAB_VPNUNICAST:
nlri = IPv4MPLSVPN.parse(nlri_bin, iswithdraw=True)
return dict(afi_safi=(afi, safi), withdraw=nlri)
# BGP flow spec
elif safi == safn.SAFNUM_FSPEC_RULE:
# if nlri length is greater than 240 bytes, it is encoded over 2 bytes
withdraw_list = []
while nlri_bin:
length = ord(nlri_bin[0:1])
if length >> 4 == 0xf and len(nlri_bin) > 2:
length = struct.unpack('!H', nlri_bin[:2])[0]
nlri_tmp = nlri_bin[2: length + 2]
nlri_bin = nlri_bin[length + 2:]
else:
nlri_tmp = nlri_bin[1: length + 1]
nlri_bin = nlri_bin[length + 1:]
nlri = IPv4FlowSpec.parse(nlri_tmp)
if nlri:
withdraw_list.append(nlri)
return dict(afi_safi=(afi, safi), withdraw=withdraw_list)
else:
return dict(afi_safi=(afn.AFNUM_INET, safi), withdraw=repr(nlri_bin))
# for ipv6
elif afi == afn.AFNUM_INET6:
# for ipv6 unicast
if safi == safn.SAFNUM_UNICAST:
return dict(afi_safi=(afi, safi), withdraw=IPv6Unicast.parse(nlri_data=nlri_bin))
elif safi == safn.SAFNUM_LAB_VPNUNICAST:
return dict(afi_safi=(afi, safi), withdraw=IPv6MPLSVPN.parse(value=nlri_bin, iswithdraw=True))
else:
return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))
# for l2vpn
elif afi == afn.AFNUM_L2VPN:
# for evpn
if safi == safn.SAFNUM_EVPN:
return dict(afi_safi=(afi, safi), withdraw=EVPN.parse(nlri_data=nlri_bin))
else:
return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))
# BGP LS
elif afi == afn.AFNUM_BGPLS:
if safi == safn.SAFNUM_BGPLS:
withdraw = BGPLS.parse(nlri_bin)
return dict(afi_safi=(afi, safi), withdraw=withdraw)
else:
pass
else:
return dict(afi_safi=(afi, safi), withdraw=repr(nlri_bin))
@classmethod
def construct(cls, value):
"""Construct a attribute
:param value: python dictionary
{'afi_safi': (1,128),
'withdraw': []
"""
afi, safi = value['afi_safi']
if afi == afn.AFNUM_INET:
if safi == safn.SAFNUM_LAB_VPNUNICAST: # MPLS VPN
nlri = IPv4MPLSVPN.construct(value['withdraw'], iswithdraw=True)
if nlri:
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
else:
return None
elif safi == safn.SAFNUM_FSPEC_RULE:
try:
nlri_list = value.get('withdraw') or []
if not nlri_list:
return None
nlri_hex = b''
nlri_hex += IPv4FlowSpec.construct(value=nlri_list)
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
except Exception:
raise excep.ConstructAttributeFailed(
reason='failed to construct attributes',
data=value
)
elif safi == safn.SAFNUM_SRTE:
try:
nlri_list = value.get('withdraw') or {}
if not nlri_list:
return None
nlri_hex = b''
nlri_hex += IPv4SRTE.construct(data=value['withdraw'])
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
except Exception:
raise excep.ConstructAttributeFailed(
reason='failed to construct attributes',
data=value
)
elif safi == safn.SAFNUM_MPLS_LABEL:
try:
nlri_list = value.get('withdraw') or []
if not nlri_list:
return None
nlri_hex = b''
flag = 'withdraw'
nlri_hex += IPv4LabeledUnicast.construct(nlri_list, flag)
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri_hex
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
except Exception:
raise excep.ConstructAttributeFailed(
reason='failed to construct attributes',
data=value
)
else:
raise excep.ConstructAttributeFailed(
                    reason='unsupported sub address family',
data=value)
elif afi == afn.AFNUM_INET6:
if safi == safn.SAFNUM_UNICAST:
nlri = IPv6Unicast.construct(nlri_list=value['withdraw'])
if nlri:
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
elif safi == safn.SAFNUM_LAB_VPNUNICAST:
nlri = IPv6MPLSVPN.construct(value=value['withdraw'], iswithdraw=True)
if nlri:
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
else:
return None
# for l2vpn
elif afi == afn.AFNUM_L2VPN:
# for evpn
if safi == safn.SAFNUM_EVPN:
nlri = EVPN.construct(nlri_list=value['withdraw'])
if nlri:
attr_value = struct.pack('!H', afi) + struct.pack('!B', safi) + nlri
return struct.pack('!B', cls.FLAG) + struct.pack('!B', cls.ID) \
+ struct.pack('!H', len(attr_value)) + attr_value
else:
return None
else:
raise excep.ConstructAttributeFailed(
                reason='unsupported sub address family',
data=value)
| meidli/yabgp | yabgp/message/attribute/mpunreachnlri.py | Python | apache-2.0 | 9,951 | 0.002311 |
import urllib, urllib2, sys, httplib
url = "/MELA/REST_WS"
HOST_IP="109.231.126.217:8180"
#HOST_IP="localhost:8180"
if __name__=='__main__':
connection = httplib.HTTPConnection(HOST_IP)
description_file = open("./costTest.xml", "r")
body_content = description_file.read()
headers={
'Content-Type':'application/xml; charset=utf-8',
'Accept':'application/json, multipart/related'
}
    connection.request('PUT', url + '/service', body=body_content, headers=headers)
result = connection.getresponse()
print result.read()
| tuwiendsg/MELA | MELA-Extensions/MELA-ComplexCostEvaluationService/tests/mela-clients/submitServiceDescription.py | Python | apache-2.0 | 589 | 0.050934 |
"""SANE backend.
$Id$"""
import sane
from imagescanner.backends import base
class ScannerManager(base.ScannerManager):
def _refresh(self):
self._devices = []
sane.init()
devices = sane.get_devices()
for dev in devices:
# Check if sane is able to open this device, if not just skip
try:
scanner = sane.open(dev[0])
scanner.close()
except:
continue
scanner_id = 'sane-%s' % len(self._devices)
scanner = Scanner(scanner_id, dev[0], dev[1], dev[2], dev[3])
self._devices.append(scanner)
sane.exit()
class Scanner(base.Scanner):
def __init__(self, scanner_id, device, manufacturer, name, description):
self.id = scanner_id
self.manufacturer = manufacturer
self.name = name
self.description = description
self._device = device
def __repr__(self):
return '<%s: %s - %s>' % (self.id, self.manufacturer, self.name)
def scan(self, dpi=200):
sane.init()
scanner = sane.open(self._device)
image = scanner.scan()
scanner.close()
sane.exit()
return image
| Eveler/libs | __Python__/edv/edv/imagescanner/backends/sane/__init__.py | Python | gpl-3.0 | 1,278 | 0.008607 |
#!/usr/bin/env python -u
'''
This script does the following
1. Connect the router to the public network
2. Add a public key
3. Boot a cirros instance
4. Attach a floating IP
'''
from __future__ import print_function
import datetime
import os.path
import socket
import sys
import time
from novaclient.v1_1 import client as novaclient
from neutronclient.v2_0 import client as neutronclient
auth_url = "http://192.168.27.100:35357/v2.0"
username = "demo"
password = "password"
tenant_name = "demo"
neutron = neutronclient.Client(auth_url=auth_url,
username=username,
password=password,
tenant_name=tenant_name)
nova = novaclient.Client(auth_url=auth_url,
username=username,
api_key=password,
project_id=tenant_name)
if not nova.keypairs.findall(name="mykey"):
print("Creating keypair: mykey...")
with open(os.path.expanduser('~/.ssh/id_rsa.pub')) as fpubkey:
nova.keypairs.create(name="mykey", public_key=fpubkey.read())
print("done")
print("Booting cirros instance...", end='')
image = nova.images.find(name="cirros-0.3.1-x86_64-uec")
flavor = nova.flavors.find(name="m1.tiny")
instance = nova.servers.create(name="cirros", image=image, flavor=flavor,
key_name="mykey")
# Poll at 5 second intervals, until the status is no longer 'BUILD'
status = instance.status
while status == 'BUILD':
time.sleep(5)
# Retrieve the instance again so the status field updates
instance = nova.servers.get(instance.id)
status = instance.status
print("done")
print("Creating floating ip...", end='')
# Get external network
ext_net, = [x for x in neutron.list_networks()['networks']
if x['router:external']]
# Get the port corresponding to the instance
port, = [x for x in neutron.list_ports()['ports']
if x['device_id'] == instance.id]
# Create the floating ip
args = dict(floating_network_id=ext_net['id'],
port_id=port['id'])
ip_obj = neutron.create_floatingip(body={'floatingip': args})
print("done")
ip = ip_obj['floatingip']['floating_ip_address']
print("IP:{}".format(ip))
print("Waiting for ssh to be ready on cirros instance...", end='')
start = datetime.datetime.now()
timeout = 120
end = start + datetime.timedelta(seconds=timeout)
port = 22
connect_timeout = 5
# From utilities/wait_for of ansible
while datetime.datetime.now() < end:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(connect_timeout)
try:
s.connect((ip, port))
s.shutdown(socket.SHUT_RDWR)
s.close()
print()
break
    except:
        time.sleep(1)
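# The while loop's else clause runs only if the loop finished without break,
# i.e. the ssh port never became reachable before the timeout expired.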
else:
print("ssh server never came up!")
sys.exit(1)
| naototty/devstack-vagrant-Ironic | boot-cirros.py | Python | apache-2.0 | 2,863 | 0.002445 |
from office365.runtime.client_object import ClientObject
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.runtime.paths.resource_path import ResourcePath
from office365.sharepoint.userprofiles.userProfile import UserProfile
class ProfileLoader(ClientObject):
def __init__(self, context):
super(ProfileLoader, self).__init__(context, ResourcePath("SP.UserProfiles.ProfileLoader.GetProfileLoader"))
@staticmethod
def get_profile_loader(context):
"""
:type: office365.sharepoint.client_context.ClientContext context
"""
result = ProfileLoader(context)
qry = ServiceOperationQuery(result, "GetProfileLoader", None, None, None, result)
qry.static = True
context.add_query(qry)
return result
def get_user_profile(self):
result = UserProfile(self.context, ResourcePath("GetUserProfile", self.resource_path))
qry = ServiceOperationQuery(self, "GetUserProfile", None, None, None, result)
self.context.add_query(qry)
return result
@property
def entity_type_name(self):
return "SP.UserProfiles.ProfileLoader"
| vgrem/Office365-REST-Python-Client | office365/sharepoint/userprofiles/profileLoader.py | Python | mit | 1,190 | 0.004202 |
"Unit/Functional tests"
import os
from django.contrib.admin.options import ModelAdmin
from django.contrib.admin.sites import AdminSite
from django.test import TestCase
from django.db import models, transaction
from django.contrib.auth.models import User
from django.db.models import Q
from django.conf import settings
from django import VERSION as DJANGO_VERSION
from django.utils.functional import wraps
from treebeard import numconv
from treebeard.exceptions import InvalidPosition, InvalidMoveToDescendant, \
PathOverflow, MissingNodeOrderBy
from treebeard.forms import MoveNodeForm
from treebeard.tests.models import *
# crude app detection; there is probably a proper introspection method,
# but checking INSTALLED_APPS works well enough here
HAS_DJANGO_AUTH = 'django.contrib.auth' in settings.INSTALLED_APPS
BASE_DATA = [
{'data':{'desc':'1'}},
{'data':{'desc':'2'}, 'children':[
{'data':{'desc':'21'}},
{'data':{'desc':'22'}},
{'data':{'desc':'23'}, 'children':[
{'data':{'desc':'231'}},
]},
{'data':{'desc':'24'}},
]},
{'data':{'desc':'3'}},
{'data':{'desc':'4'}, 'children':[
{'data':{'desc':'41'}},
]},
]
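# BASE_DATA describes this forest (indentation = depth):
#   1
#   2
#     21
#     22
#     23
#       231
#     24
#   3
#   4
#     41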
def testtype(treetype, proxy):
def decorator(f):
@wraps(f)
def _testtype(self):
{'MP': self.set_MP,
'AL': self.set_AL,
'NS': self.set_NS}[treetype](proxy)
try:
f(self)
finally:
transaction.rollback()
self.model = None
self.sorted_model = None
self.dep_model = None
return _testtype
return decorator
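# _load_test_methods turns every _multi_* method of a test class into concrete
# test_* methods: one per tree implementation (MP, AL, NS) and, when available,
# an extra proxy-model variant, each wrapped with the testtype decorator above.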
def _load_test_methods(cls, proxy=True):
if proxy and DJANGO_VERSION >= (1, 1):
proxyopts = (False, True)
else:
proxyopts = (False,)
for m in dir(cls):
if not m.startswith('_multi_'):
continue
for t in ('MP', 'AL', 'NS'):
for p in proxyopts:
deco = testtype(t, p)
if p:
_proxy = '_proxy'
else:
_proxy = ''
name = 'test_%s%s_%s' % (t.lower(),
_proxy,
m.split('_', 2)[2])
setattr(cls, name, deco(getattr(cls, m)))
class TestTreeBase(TestCase):
def setUp(self):
self.set_MP()
self.unchanged = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
def set_MP(self, proxy=False):
if proxy and DJANGO_VERSION >= (1, 1):
self.model = MP_TestNode_Proxy
else:
self.model = MP_TestNode
self.sorted_model = MP_TestNodeSorted
self.dep_model = MP_TestNodeSomeDep
def set_NS(self, proxy=False):
if proxy and DJANGO_VERSION >= (1, 1):
self.model = NS_TestNode_Proxy
else:
self.model = NS_TestNode
self.sorted_model = NS_TestNodeSorted
self.dep_model = NS_TestNodeSomeDep
def set_AL(self, proxy=False):
if proxy and DJANGO_VERSION >= (1, 1):
self.model = AL_TestNode_Proxy
else:
self.model = AL_TestNode
self.sorted_model = AL_TestNodeSorted
self.dep_model = AL_TestNodeSomeDep
def got(self):
nsmodels = [NS_TestNode]
if DJANGO_VERSION >= (1, 1):
nsmodels.append(NS_TestNode_Proxy)
if self.model in nsmodels:
# this slows down nested sets tests quite a bit, but it has the
# advantage that we'll check the node edges are correct
d = {}
for tree_id, lft, rgt in self.model.objects.values_list('tree_id',
'lft',
'rgt'):
d.setdefault(tree_id, []).extend([lft, rgt])
for tree_id, got_edges in d.items():
self.assertEqual(len(got_edges), max(got_edges))
good_edges = range(1, len(got_edges) + 1)
self.assertEqual(sorted(got_edges), good_edges)
return [(o.desc, o.get_depth(), o.get_children_count())
for o in self.model.get_tree()]
def _assert_get_annotated_list(self, expected, parent=None):
got = [
(obj[0].desc, obj[1]['open'], obj[1]['close'], obj[1]['level'])
for obj in self.model.get_annotated_list(parent)]
self.assertEqual(expected, got)
class TestEmptyTree(TestTreeBase):
def _multi_load_bulk_empty(self):
ids = self.model.load_bulk(BASE_DATA)
got_descs = [obj.desc
for obj in self.model.objects.filter(id__in=ids)]
expected_descs = [x[0] for x in self.unchanged]
self.assertEqual(sorted(got_descs), sorted(expected_descs))
self.assertEqual(self.got(), self.unchanged)
def _multi_dump_bulk_empty(self):
self.assertEqual(self.model.dump_bulk(), [])
def _multi_add_root_empty(self):
self.model.add_root(desc='1')
expected = [(u'1', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_get_root_nodes_empty(self):
got = self.model.get_root_nodes()
expected = []
self.assertEqual([node.desc for node in got], expected)
def _multi_get_first_root_node_empty(self):
got = self.model.get_first_root_node()
self.assertEqual(got, None)
def _multi_get_last_root_node_empty(self):
got = self.model.get_last_root_node()
self.assertEqual(got, None)
def _multi_get_tree(self):
got = list(self.model.get_tree())
self.assertEqual(got, [])
def _multi_get_annotated_list(self):
expected = []
self._assert_get_annotated_list(expected)
class TestNonEmptyTree(TestTreeBase):
def setUp(self):
super(TestNonEmptyTree, self).setUp()
MP_TestNode.load_bulk(BASE_DATA)
AL_TestNode.load_bulk(BASE_DATA)
NS_TestNode.load_bulk(BASE_DATA)
class TestClassMethods(TestNonEmptyTree):
def setUp(self):
super(TestClassMethods, self).setUp()
def _multi_load_bulk_existing(self):
# inserting on an existing node
node = self.model.objects.get(desc=u'231')
ids = self.model.load_bulk(BASE_DATA, node)
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 4),
(u'1', 4, 0),
(u'2', 4, 4),
(u'21', 5, 0),
(u'22', 5, 0),
(u'23', 5, 1),
(u'231', 6, 0),
(u'24', 5, 0),
(u'3', 4, 0),
(u'4', 4, 1),
(u'41', 5, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
expected_descs = [u'1', u'2', u'21', u'22', u'23', u'231', u'24',
u'3', u'4', u'41']
got_descs = [obj.desc
for obj in self.model.objects.filter(id__in=ids)]
self.assertEqual(sorted(got_descs), sorted(expected_descs))
self.assertEqual(self.got(), expected)
def _multi_get_tree_all(self):
got = [(o.desc, o.get_depth(), o.get_children_count())
for o in self.model.get_tree()]
self.assertEqual(got, self.unchanged)
def _multi_dump_bulk_all(self):
self.assertEqual(self.model.dump_bulk(keep_ids=False), BASE_DATA)
def _multi_get_tree_node(self):
node = self.model.objects.get(desc=u'231')
self.model.load_bulk(BASE_DATA, node)
# the tree was modified by load_bulk, so we reload our node object
node = self.model.objects.get(pk=node.id)
got = [(o.desc, o.get_depth(), o.get_children_count())
for o in self.model.get_tree(node)]
expected = [(u'231', 3, 4),
(u'1', 4, 0),
(u'2', 4, 4),
(u'21', 5, 0),
(u'22', 5, 0),
(u'23', 5, 1),
(u'231', 6, 0),
(u'24', 5, 0),
(u'3', 4, 0),
(u'4', 4, 1),
(u'41', 5, 0)]
self.assertEqual(got, expected)
def _multi_get_tree_leaf(self):
node = self.model.objects.get(desc=u'1')
self.assertEqual(0, node.get_children_count())
got = [(o.desc, o.get_depth(), o.get_children_count())
for o in self.model.get_tree(node)]
expected = [(u'1', 1, 0)]
self.assertEqual(got, expected)
def _multi_get_annotated_list_all(self):
expected = [(u'1', True, [], 0), (u'2', False, [], 0),
(u'21', True, [], 1), (u'22', False, [], 1),
(u'23', False, [], 1), (u'231', True, [0], 2),
(u'24', False, [0], 1), (u'3', False, [], 0),
(u'4', False, [], 0), (u'41', True, [0, 1], 1)]
self._assert_get_annotated_list(expected)
def _multi_get_annotated_list_node(self):
node = self.model.objects.get(desc=u'2')
expected = [(u'2', True, [], 0), (u'21', True, [], 1),
(u'22', False, [], 1), (u'23', False, [], 1),
(u'231', True, [0], 2), (u'24', False, [0, 1], 1)]
self._assert_get_annotated_list(expected, node)
def _multi_get_annotated_list_leaf(self):
node = self.model.objects.get(desc=u'1')
expected = [(u'1', True, [0], 0)]
self._assert_get_annotated_list(expected, node)
def _multi_dump_bulk_node(self):
node = self.model.objects.get(desc=u'231')
self.model.load_bulk(BASE_DATA, node)
# the tree was modified by load_bulk, so we reload our node object
node = self.model.objects.get(pk=node.id)
got = self.model.dump_bulk(node, False)
expected = [{'data':{'desc':u'231'}, 'children':BASE_DATA}]
self.assertEqual(got, expected)
def _multi_load_and_dump_bulk_keeping_ids(self):
exp = self.model.dump_bulk(keep_ids=True)
self.model.objects.all().delete()
self.model.load_bulk(exp, None, True)
got = self.model.dump_bulk(keep_ids=True)
self.assertEqual(got, exp)
        # do we really have an unchanged tree after the dump/delete/load?
got = [(o.desc, o.get_depth(), o.get_children_count())
for o in self.model.get_tree()]
self.assertEqual(got, self.unchanged)
def _multi_get_root_nodes(self):
got = self.model.get_root_nodes()
expected = ['1', '2', '3', '4']
self.assertEqual([node.desc for node in got], expected)
def _multi_get_first_root_node(self):
got = self.model.get_first_root_node()
self.assertEqual(got.desc, '1')
def _multi_get_last_root_node(self):
got = self.model.get_last_root_node()
self.assertEqual(got.desc, '4')
def _multi_add_root(self):
obj = self.model.add_root(desc='5')
self.assertEqual(obj.get_depth(), 1)
self.assertEqual(self.model.get_last_root_node().desc, '5')
class TestSimpleNodeMethods(TestNonEmptyTree):
def _multi_is_root(self):
data = [
('2', True),
('1', True),
('4', True),
('21', False),
('24', False),
('22', False),
('231', False),
]
for desc, expected in data:
got = self.model.objects.get(desc=desc).is_root()
self.assertEqual(got, expected)
def _multi_is_leaf(self):
data = [
('2', False),
('23', False),
('231', True),
]
for desc, expected in data:
got = self.model.objects.get(desc=desc).is_leaf()
self.assertEqual(got, expected)
def _multi_get_root(self):
data = [
('2', '2'),
('1', '1'),
('4', '4'),
('21', '2'),
('24', '2'),
('22', '2'),
('231', '2'),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_root()
self.assertEqual(node.desc, expected)
def _multi_get_parent(self):
data = [
('2', None),
('1', None),
('4', None),
('21', '2'),
('24', '2'),
('22', '2'),
('231', '23'),
]
data = dict(data)
objs = {}
for desc, expected in data.items():
node = self.model.objects.get(desc=desc)
parent = node.get_parent()
if expected:
self.assertEqual(parent.desc, expected)
else:
self.assertEqual(parent, None)
objs[desc] = node
# corrupt the objects' parent cache
node._parent_obj = 'CORRUPTED!!!'
for desc, expected in data.items():
node = objs[desc]
# asking get_parent to not use the parent cache (since we
# corrupted it in the previous loop)
parent = node.get_parent(True)
if expected:
self.assertEqual(parent.desc, expected)
else:
self.assertEqual(parent, None)
def _multi_get_children(self):
data = [
('2', ['21', '22', '23', '24']),
('23', ['231']),
('231', []),
]
for desc, expected in data:
children = self.model.objects.get(desc=desc).get_children()
self.assertEqual([node.desc for node in children], expected)
def _multi_get_children_count(self):
data = [
('2', 4),
('23', 1),
('231', 0),
]
for desc, expected in data:
got = self.model.objects.get(desc=desc).get_children_count()
self.assertEqual(got, expected)
def _multi_get_siblings(self):
data = [
('2', ['1', '2', '3', '4']),
('21', ['21', '22', '23', '24']),
('231', ['231']),
]
for desc, expected in data:
siblings = self.model.objects.get(desc=desc).get_siblings()
self.assertEqual([node.desc for node in siblings], expected)
def _multi_get_first_sibling(self):
data = [
('2', '1'),
('1', '1'),
('4', '1'),
('21', '21'),
('24', '21'),
('22', '21'),
('231', '231'),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_first_sibling()
self.assertEqual(node.desc, expected)
def _multi_get_prev_sibling(self):
data = [
('2', '1'),
('1', None),
('4', '3'),
('21', None),
('24', '23'),
('22', '21'),
('231', None),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_prev_sibling()
if expected is None:
self.assertEqual(node, None)
else:
self.assertEqual(node.desc, expected)
def _multi_get_next_sibling(self):
data = [
('2', '3'),
('1', '2'),
('4', None),
('21', '22'),
('24', None),
('22', '23'),
('231', None),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_next_sibling()
if expected is None:
self.assertEqual(node, None)
else:
self.assertEqual(node.desc, expected)
def _multi_get_last_sibling(self):
data = [
('2', '4'),
('1', '4'),
('4', '4'),
('21', '24'),
('24', '24'),
('22', '24'),
('231', '231'),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_last_sibling()
self.assertEqual(node.desc, expected)
def _multi_get_first_child(self):
data = [
('2', '21'),
('21', None),
('23', '231'),
('231', None),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_first_child()
if expected is None:
self.assertEqual(node, None)
else:
self.assertEqual(node.desc, expected)
def _multi_get_last_child(self):
data = [
('2', '24'),
('21', None),
('23', '231'),
('231', None),
]
for desc, expected in data:
node = self.model.objects.get(desc=desc).get_last_child()
if expected is None:
self.assertEqual(node, None)
else:
self.assertEqual(node.desc, expected)
def _multi_get_ancestors(self):
data = [
('2', []),
('21', ['2']),
('231', ['2', '23']),
]
for desc, expected in data:
nodes = self.model.objects.get(desc=desc).get_ancestors()
self.assertEqual([node.desc for node in nodes], expected)
def _multi_get_descendants(self):
data = [
('2', ['21', '22', '23', '231', '24']),
('23', ['231']),
('231', []),
('1', []),
('4', ['41']),
]
for desc, expected in data:
nodes = self.model.objects.get(desc=desc).get_descendants()
self.assertEqual([node.desc for node in nodes], expected)
def _multi_get_descendant_count(self):
data = [
('2', 5),
('23', 1),
('231', 0),
('1', 0),
('4', 1),
]
for desc, expected in data:
got = self.model.objects.get(desc=desc).get_descendant_count()
self.assertEqual(got, expected)
def _multi_is_sibling_of(self):
data = [
('2', '2', True),
('2', '1', True),
('21', '2', False),
('231', '2', False),
('22', '23', True),
('231', '23', False),
('231', '231', True),
]
for desc1, desc2, expected in data:
node1 = self.model.objects.get(desc=desc1)
node2 = self.model.objects.get(desc=desc2)
self.assertEqual(node1.is_sibling_of(node2), expected)
def _multi_is_child_of(self):
data = [
('2', '2', False),
('2', '1', False),
('21', '2', True),
('231', '2', False),
('231', '23', True),
('231', '231', False),
]
for desc1, desc2, expected in data:
node1 = self.model.objects.get(desc=desc1)
node2 = self.model.objects.get(desc=desc2)
self.assertEqual(node1.is_child_of(node2), expected)
def _multi_is_descendant_of(self):
data = [
('2', '2', False),
('2', '1', False),
('21', '2', True),
('231', '2', True),
('231', '23', True),
('231', '231', False),
]
for desc1, desc2, expected in data:
node1 = self.model.objects.get(desc=desc1)
node2 = self.model.objects.get(desc=desc2)
self.assertEqual(node1.is_descendant_of(node2), expected)
class TestAddChild(TestNonEmptyTree):
def _multi_add_child_to_leaf(self):
self.model.objects.get(desc=u'231').add_child(desc='2311')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 1),
(u'2311', 4, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_child_to_node(self):
self.model.objects.get(desc=u'2').add_child(desc='25')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'25', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
class TestAddSibling(TestNonEmptyTree):
def _multi_add_sibling_invalid_pos(self):
method = self.model.objects.get(desc=u'231').add_sibling
self.assertRaises(InvalidPosition, method, 'invalid_pos')
def _multi_add_sibling_missing_nodeorderby(self):
node_wchildren = self.model.objects.get(desc=u'2')
method = node_wchildren.add_sibling
self.assertRaises(MissingNodeOrderBy, method, 'sorted-sibling',
desc='aaa')
def _multi_add_sibling_last_root(self):
node_wchildren = self.model.objects.get(desc=u'2')
obj = node_wchildren.add_sibling('last-sibling', desc='5')
self.assertEqual(obj.get_depth(), 1)
self.assertEqual(node_wchildren.get_last_sibling().desc, u'5')
def _multi_add_sibling_last(self):
node = self.model.objects.get(desc=u'231')
obj = node.add_sibling('last-sibling', desc='232')
self.assertEqual(obj.get_depth(), 3)
self.assertEqual(node.get_last_sibling().desc, u'232')
def _multi_add_sibling_first_root(self):
node_wchildren = self.model.objects.get(desc=u'2')
obj = node_wchildren.add_sibling('first-sibling', desc='new')
self.assertEqual(obj.get_depth(), 1)
expected = [(u'new', 1, 0),
(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_first(self):
node_wchildren = self.model.objects.get(desc=u'23')
obj = node_wchildren.add_sibling('first-sibling', desc='new')
self.assertEqual(obj.get_depth(), 2)
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'new', 2, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_left_root(self):
node_wchildren = self.model.objects.get(desc=u'2')
obj = node_wchildren.add_sibling('left', desc='new')
self.assertEqual(obj.get_depth(), 1)
expected = [(u'1', 1, 0),
(u'new', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_left(self):
node_wchildren = self.model.objects.get(desc=u'23')
obj = node_wchildren.add_sibling('left', desc='new')
self.assertEqual(obj.get_depth(), 2)
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'new', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_left_noleft_root(self):
node = self.model.objects.get(desc=u'1')
obj = node.add_sibling('left', desc='new')
self.assertEqual(obj.get_depth(), 1)
expected = [(u'new', 1, 0),
(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_left_noleft(self):
node = self.model.objects.get(desc=u'231')
obj = node.add_sibling('left', desc='new')
self.assertEqual(obj.get_depth(), 3)
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 2),
(u'new', 3, 0),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_right_root(self):
node_wchildren = self.model.objects.get(desc=u'2')
obj = node_wchildren.add_sibling('right', desc='new')
self.assertEqual(obj.get_depth(), 1)
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'new', 1, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_right(self):
node_wchildren = self.model.objects.get(desc=u'23')
obj = node_wchildren.add_sibling('right', desc='new')
self.assertEqual(obj.get_depth(), 2)
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'new', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_right_noright_root(self):
node = self.model.objects.get(desc=u'4')
obj = node.add_sibling('right', desc='new')
self.assertEqual(obj.get_depth(), 1)
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0),
(u'new', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_sibling_right_noright(self):
node = self.model.objects.get(desc=u'231')
obj = node.add_sibling('right', desc='new')
self.assertEqual(obj.get_depth(), 3)
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 2),
(u'231', 3, 0),
(u'new', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
class TestDelete(TestNonEmptyTree):
def setUp(self):
super(TestDelete, self).setUp()
for node in self.model.objects.all():
self.dep_model(node=node).save()
def _multi_delete_leaf(self):
self.model.objects.get(desc=u'231').delete()
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_node(self):
self.model.objects.get(desc=u'23').delete()
expected = [(u'1', 1, 0),
(u'2', 1, 3),
(u'21', 2, 0),
(u'22', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_root(self):
self.model.objects.get(desc=u'2').delete()
expected = [(u'1', 1, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_filter_root_nodes(self):
self.model.objects.filter(desc__in=('2', '3')).delete()
expected = [(u'1', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_filter_children(self):
self.model.objects.filter(
desc__in=('2', '23', '231')).delete()
expected = [(u'1', 1, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_nonexistant_nodes(self):
self.model.objects.filter(desc__in=('ZZZ', 'XXX')).delete()
self.assertEqual(self.got(), self.unchanged)
def _multi_delete_same_node_twice(self):
self.model.objects.filter(
desc__in=('2', '2')).delete()
expected = [(u'1', 1, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_delete_all_root_nodes(self):
self.model.get_root_nodes().delete()
count = self.model.objects.count()
self.assertEqual(count, 0)
def _multi_delete_all_nodes(self):
self.model.objects.all().delete()
count = self.model.objects.count()
self.assertEqual(count, 0)
class TestMoveErrors(TestNonEmptyTree):
def _multi_move_invalid_pos(self):
node = self.model.objects.get(desc=u'231')
self.assertRaises(InvalidPosition, node.move, node, 'invalid_pos')
def _multi_move_to_descendant(self):
node = self.model.objects.get(desc=u'2')
target = self.model.objects.get(desc=u'231')
self.assertRaises(InvalidMoveToDescendant, node.move, target,
'first-sibling')
def _multi_move_missing_nodeorderby(self):
node = self.model.objects.get(desc=u'231')
self.assertRaises(MissingNodeOrderBy, node.move, node,
'sorted-child')
self.assertRaises(MissingNodeOrderBy, node.move, node,
'sorted-sibling')
class TestMoveSortedErrors(TestNonEmptyTree):
def _multi_nonsorted_move_in_sorted(self):
node = self.sorted_model.add_root(val1=3, val2=3, desc='zxy')
self.assertRaises(InvalidPosition, node.move, node, 'left')
class TestMoveLeafRoot(TestNonEmptyTree):
def _multi_move_leaf_last_sibling_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'last-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0),
(u'231', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_first_sibling_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'first-sibling')
expected = [(u'231', 1, 0),
(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_left_sibling_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'left')
expected = [(u'1', 1, 0),
(u'231', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_right_sibling_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'231', 1, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_last_child_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'last-child')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'231', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_first_child_root(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'2'), 'first-child')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'231', 2, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
class TestMoveLeaf(TestNonEmptyTree):
def _multi_move_leaf_last_sibling(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'last-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'231', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_first_sibling(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'first-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'231', 2, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_left_sibling(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'left')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'231', 2, 0),
(u'22', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_right_sibling(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'231', 2, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_left_sibling_itself(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'231'), 'left')
self.assertEqual(self.got(), self.unchanged)
def _multi_move_leaf_last_child(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'last-child')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 1),
(u'231', 3, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_leaf_first_child(self):
self.model.objects.get(desc=u'231').move(
self.model.objects.get(desc=u'22'), 'first-child')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 1),
(u'231', 3, 0),
(u'23', 2, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
class TestMoveBranchRoot(TestNonEmptyTree):
def _multi_move_branch_first_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'first-sibling')
expected = [(u'4', 1, 1),
(u'41', 2, 0),
(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_last_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'last-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_left_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'left')
expected = [(u'1', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_right_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'4', 1, 1),
(u'41', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_left_noleft_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2').get_first_sibling(), 'left')
expected = [(u'4', 1, 1),
(u'41', 2, 0),
(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_right_noright_sibling_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2').get_last_sibling(), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0),
(u'4', 1, 1),
(u'41', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_first_child_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'first-child')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'4', 2, 1),
(u'41', 3, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_last_child_root(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='2'), 'last-child')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'4', 2, 1),
(u'41', 3, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
class TestMoveBranch(TestNonEmptyTree):
def _multi_move_branch_first_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'first-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'4', 2, 1),
(u'41', 3, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_last_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'last-sibling')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'4', 2, 1),
(u'41', 3, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_left_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'left')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'4', 2, 1),
(u'41', 3, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_right_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'4', 2, 1),
(u'41', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_left_noleft_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23').get_first_sibling(), 'left')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'4', 2, 1),
(u'41', 3, 0),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_right_noright_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23').get_last_sibling(), 'right')
expected = [(u'1', 1, 0),
(u'2', 1, 5),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 1),
(u'231', 3, 0),
(u'24', 2, 0),
(u'4', 2, 1),
(u'41', 3, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_left_itself_sibling(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='4'), 'left')
self.assertEqual(self.got(), self.unchanged)
def _multi_move_branch_first_child(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'first-child')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 2),
(u'4', 3, 1),
(u'41', 4, 0),
(u'231', 3, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_branch_last_child(self):
self.model.objects.get(desc='4').move(
self.model.objects.get(desc='23'), 'last-child')
expected = [(u'1', 1, 0),
(u'2', 1, 4),
(u'21', 2, 0),
(u'22', 2, 0),
(u'23', 2, 2),
(u'231', 3, 0),
(u'4', 3, 1),
(u'41', 4, 0),
(u'24', 2, 0),
(u'3', 1, 0)]
self.assertEqual(self.got(), expected)
class TestTreeSorted(TestTreeBase):
def got(self):
return [(o.val1, o.val2, o.desc, o.get_depth(), o.get_children_count())
for o in self.sorted_model.get_tree()]
def _multi_add_root_sorted(self):
self.sorted_model.add_root(val1=3, val2=3, desc='zxy')
self.sorted_model.add_root(val1=1, val2=4, desc='bcd')
self.sorted_model.add_root(val1=2, val2=5, desc='zxy')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=4, val2=1, desc='fgh')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=2, val2=2, desc='qwe')
self.sorted_model.add_root(val1=3, val2=2, desc='vcx')
expected = [(1, 4, u'bcd', 1, 0),
(2, 2, u'qwe', 1, 0),
(2, 5, u'zxy', 1, 0),
(3, 2, u'vcx', 1, 0),
(3, 3, u'abc', 1, 0),
(3, 3, u'abc', 1, 0),
(3, 3, u'zxy', 1, 0),
(4, 1, u'fgh', 1, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_child_root_sorted(self):
root = self.sorted_model.add_root(val1=0, val2=0, desc='aaa')
root.add_child(val1=3, val2=3, desc='zxy')
root.add_child(val1=1, val2=4, desc='bcd')
root.add_child(val1=2, val2=5, desc='zxy')
root.add_child(val1=3, val2=3, desc='abc')
root.add_child(val1=4, val2=1, desc='fgh')
root.add_child(val1=3, val2=3, desc='abc')
root.add_child(val1=2, val2=2, desc='qwe')
root.add_child(val1=3, val2=2, desc='vcx')
expected = [(0, 0, u'aaa', 1, 8),
(1, 4, u'bcd', 2, 0),
(2, 2, u'qwe', 2, 0),
(2, 5, u'zxy', 2, 0),
(3, 2, u'vcx', 2, 0),
(3, 3, u'abc', 2, 0),
(3, 3, u'abc', 2, 0),
(3, 3, u'zxy', 2, 0),
(4, 1, u'fgh', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_add_child_nonroot_sorted(self):
get_node = lambda node_id: self.sorted_model.objects.get(pk=node_id)
root_id = self.sorted_model.add_root(val1=0, val2=0, desc='a').id
node_id = get_node(root_id).add_child(val1=0, val2=0, desc='ac').id
get_node(root_id).add_child(val1=0, val2=0, desc='aa')
get_node(root_id).add_child(val1=0, val2=0, desc='av')
get_node(node_id).add_child(val1=0, val2=0, desc='aca')
get_node(node_id).add_child(val1=0, val2=0, desc='acc')
get_node(node_id).add_child(val1=0, val2=0, desc='acb')
expected = [(0, 0, u'a', 1, 3),
(0, 0, u'aa', 2, 0),
(0, 0, u'ac', 2, 3),
(0, 0, u'aca', 3, 0),
(0, 0, u'acb', 3, 0),
(0, 0, u'acc', 3, 0),
(0, 0, u'av', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_sorted(self):
self.sorted_model.add_root(val1=3, val2=3, desc='zxy')
self.sorted_model.add_root(val1=1, val2=4, desc='bcd')
self.sorted_model.add_root(val1=2, val2=5, desc='zxy')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=4, val2=1, desc='fgh')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=2, val2=2, desc='qwe')
self.sorted_model.add_root(val1=3, val2=2, desc='vcx')
root_nodes = self.sorted_model.get_root_nodes()
target = root_nodes[0]
for node in root_nodes[1:]:
# because raw queries don't update django objects
node = self.sorted_model.objects.get(pk=node.id)
target = self.sorted_model.objects.get(pk=target.id)
node.move(target, 'sorted-child')
expected = [(1, 4, u'bcd', 1, 7),
(2, 2, u'qwe', 2, 0),
(2, 5, u'zxy', 2, 0),
(3, 2, u'vcx', 2, 0),
(3, 3, u'abc', 2, 0),
(3, 3, u'abc', 2, 0),
(3, 3, u'zxy', 2, 0),
(4, 1, u'fgh', 2, 0)]
self.assertEqual(self.got(), expected)
def _multi_move_sortedsibling(self):
# https://bitbucket.org/tabo/django-treebeard/issue/27
self.sorted_model.add_root(val1=3, val2=3, desc='zxy')
self.sorted_model.add_root(val1=1, val2=4, desc='bcd')
self.sorted_model.add_root(val1=2, val2=5, desc='zxy')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=4, val2=1, desc='fgh')
self.sorted_model.add_root(val1=3, val2=3, desc='abc')
self.sorted_model.add_root(val1=2, val2=2, desc='qwe')
self.sorted_model.add_root(val1=3, val2=2, desc='vcx')
root_nodes = self.sorted_model.get_root_nodes()
target = root_nodes[0]
for node in root_nodes[1:]:
# because raw queries don't update django objects
node = self.sorted_model.objects.get(pk=node.id)
target = self.sorted_model.objects.get(pk=target.id)
node.val1 = node.val1 - 2
node.save()
node.move(target, 'sorted-sibling')
expected = [(0, 2, u'qwe', 1, 0),
(0, 5, u'zxy', 1, 0),
(1, 2, u'vcx', 1, 0),
(1, 3, u'abc', 1, 0),
(1, 3, u'abc', 1, 0),
(1, 3, u'zxy', 1, 0),
(1, 4, u'bcd', 1, 0),
(2, 1, u'fgh', 1, 0)]
self.assertEqual(self.got(), expected)
class TestMP_TreeAlphabet(TestCase):
def test_alphabet(self):
if not os.getenv('TREEBEARD_TEST_ALPHABET', False):
            # run this test only if the environment variable is set
return
basealpha = numconv.BASE85
got_err = False
last_good = None
for alphabetlen in range(35, len(basealpha) + 1):
alphabet = basealpha[0:alphabetlen]
expected = [alphabet[0] + char for char in alphabet[1:]]
expected.extend([alphabet[1] + char for char in alphabet])
expected.append(alphabet[2] + alphabet[0])
# remove all nodes
MP_TestNodeAlphabet.objects.all().delete()
# change the model's alphabet
MP_TestNodeAlphabet.alphabet = alphabet
# insert root nodes
for pos in range(len(alphabet) * 2):
try:
MP_TestNodeAlphabet.add_root(numval=pos)
except:
got_err = True
break
if got_err:
break
got = [obj.path for obj in MP_TestNodeAlphabet.objects.all()]
if got != expected:
got_err = True
last_good = alphabet
print '\nThe best BASE85 based alphabet for your setup is: %s' \
% (last_good, )
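# TestHelpers builds a larger fixture: the BASE_DATA forest, a full copy of
# BASE_DATA grafted under each root node, plus an extra root node '5'.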
class TestHelpers(TestTreeBase):
def setUp(self):
for model in (MP_TestNode, AL_TestNode, NS_TestNode):
model.load_bulk(BASE_DATA)
for node in model.get_root_nodes():
model.load_bulk(BASE_DATA, node)
model.add_root(desc='5')
def _multi_descendants_group_count_root(self):
expected = [(o.desc, o.get_descendant_count())
for o in self.model.get_root_nodes()]
got = [(o.desc, o.descendants_count)
for o in self.model.get_descendants_group_count()]
self.assertEqual(got, expected)
def _multi_descendants_group_count_node(self):
parent = self.model.get_root_nodes().get(desc='2')
expected = [(o.desc, o.get_descendant_count())
for o in parent.get_children()]
got = [(o.desc, o.descendants_count)
for o in self.model.get_descendants_group_count(parent)]
self.assertEqual(got, expected)
class TestMP_TreeSortedAutoNow(TestCase):
"""
The sorting mechanism used by treebeard when adding a node can fail if the
ordering is using an "auto_now" field
"""
def test_sorted_by_autonow_workaround(self):
"""
workaround
"""
import datetime
for i in range(1, 5):
MP_TestNodeSortedAutoNow.add_root(desc='node%d' % (i, ),
created=datetime.datetime.now())
def test_sorted_by_autonow_FAIL(self):
"""
This test asserts that we have a problem.
fix this, somehow
"""
MP_TestNodeSortedAutoNow.add_root(desc='node1')
self.assertRaises(ValueError, MP_TestNodeSortedAutoNow.add_root,
desc='node2')
class TestMP_TreeStepOverflow(TestCase):
def test_add_root(self):
method = MP_TestNodeSmallStep.add_root
for i in range(1, 10):
method()
self.assertRaises(PathOverflow, method)
def test_add_child(self):
root = MP_TestNodeSmallStep.add_root()
method = root.add_child
for i in range(1, 10):
method()
self.assertRaises(PathOverflow, method)
def test_add_sibling(self):
root = MP_TestNodeSmallStep.add_root()
for i in range(1, 10):
root.add_child()
method = root.get_last_child().add_sibling
positions = ('first-sibling', 'left', 'right', 'last-sibling')
for pos in positions:
self.assertRaises(PathOverflow, method, pos)
def test_move(self):
root = MP_TestNodeSmallStep.add_root()
for i in range(1, 10):
root.add_child()
newroot = MP_TestNodeSmallStep.add_root()
targets = [(root, ['first-child', 'last-child']),
(root.get_first_child(), ['first-sibling',
'left',
'right',
'last-sibling'])]
for target, positions in targets:
for pos in positions:
self.assertRaises(PathOverflow, newroot.move, target, pos)
class TestMP_TreeShortPath(TestCase):
"""
Here we test a tree with a very small path field (max_length=4) and a
steplen of 1
"""
def test_short_path(self):
obj = MP_TestNodeShortPath.add_root()
obj = obj.add_child().add_child().add_child()
self.assertRaises(PathOverflow, obj.add_child)
class TestMP_TreeFindProblems(TestTreeBase):
def test_find_problems(self):
model = MP_TestNodeAlphabet
model.alphabet = '01234'
model(path='01', depth=1, numchild=0, numval=0).save()
model(path='1', depth=1, numchild=0, numval=0).save()
model(path='111', depth=1, numchild=0, numval=0).save()
model(path='abcd', depth=1, numchild=0, numval=0).save()
model(path='qa#$%!', depth=1, numchild=0, numval=0).save()
model(path='0201', depth=2, numchild=0, numval=0).save()
model(path='020201', depth=3, numchild=0, numval=0).save()
model(path='03', depth=1, numchild=2, numval=0).save()
model(path='0301', depth=2, numchild=0, numval=0).save()
model(path='030102', depth=3, numchild=10, numval=0).save()
model(path='04', depth=10, numchild=1, numval=0).save()
model(path='0401', depth=20, numchild=0, numval=0).save()
evil_chars, bad_steplen, orphans, wrong_depth, wrong_numchild = \
model.find_problems()
self.assertEqual(['abcd', 'qa#$%!'],
[o.path for o in model.objects.filter(id__in=evil_chars)])
self.assertEqual(['1', '111'],
[o.path for o in model.objects.filter(id__in=bad_steplen)])
self.assertEqual(['0201', '020201'],
[o.path for o in model.objects.filter(id__in=orphans)])
self.assertEqual(['03', '0301', '030102'],
[o.path for o in model.objects.filter(id__in=wrong_numchild)])
self.assertEqual(['04', '0401'],
[o.path for o in model.objects.filter(id__in=wrong_depth)])
class TestMP_TreeFix(TestTreeBase):
def setUp(self):
super(TestMP_TreeFix, self).setUp()
self.expected_no_holes = {
MP_TestNodeShortPath: [
(u'1', u'b', 1, 2),
(u'11', u'u', 2, 1),
(u'111', u'i', 3, 1),
(u'1111', u'e', 4, 0),
(u'12', u'o', 2, 0),
(u'2', u'd', 1, 0),
(u'3', u'g', 1, 0),
(u'4', u'a', 1, 4),
(u'41', u'a', 2, 0),
(u'42', u'a', 2, 0),
(u'43', u'u', 2, 1),
(u'431', u'i', 3, 1),
(u'4311', u'e', 4, 0),
(u'44', u'o', 2, 0)],
MP_TestSortedNodeShortPath: [
(u'1', u'a', 1, 4),
(u'11', u'a', 2, 0),
(u'12', u'a', 2, 0),
(u'13', u'o', 2, 0),
(u'14', u'u', 2, 1),
(u'141', u'i', 3, 1),
(u'1411', u'e', 4, 0),
(u'2', u'b', 1, 2),
(u'21', u'o', 2, 0),
(u'22', u'u', 2, 1),
(u'221', u'i', 3, 1),
(u'2211', u'e', 4, 0),
(u'3', u'd', 1, 0),
(u'4', u'g', 1, 0)]}
self.expected_with_holes = {
MP_TestNodeShortPath: [
(u'1', u'b', 1L, 2L),
(u'13', u'u', 2L, 1L),
(u'134', u'i', 3L, 1L),
(u'1343', u'e', 4L, 0L),
(u'14', u'o', 2L, 0L),
(u'2', u'd', 1L, 0L),
(u'3', u'g', 1L, 0L),
(u'4', u'a', 1L, 4L),
(u'41', u'a', 2L, 0L),
(u'42', u'a', 2L, 0L),
(u'43', u'u', 2L, 1L),
(u'434', u'i', 3L, 1L),
(u'4343', u'e', 4L, 0L),
(u'44', u'o', 2L, 0L)],
MP_TestSortedNodeShortPath: [
(u'1', u'b', 1L, 2L),
(u'13', u'u', 2L, 1L),
(u'134', u'i', 3L, 1L),
(u'1343', u'e', 4L, 0L),
(u'14', u'o', 2L, 0L),
(u'2', u'd', 1L, 0L),
(u'3', u'g', 1L, 0L),
(u'4', u'a', 1L, 4L),
(u'41', u'a', 2L, 0L),
(u'42', u'a', 2L, 0L),
(u'43', u'u', 2L, 1L),
(u'434', u'i', 3L, 1L),
(u'4343', u'e', 4L, 0L),
(u'44', u'o', 2L, 0L)]}
def got(self, model):
return [(o.path, o.desc, o.get_depth(), o.get_children_count())
for o in model.get_tree()]
def add_broken_test_data(self, model):
model(path='4', depth=2, numchild=2, desc='a').save()
model(path='13', depth=1000, numchild=0, desc='u').save()
model(path='14', depth=4, numchild=500, desc='o').save()
model(path='134', depth=321, numchild=543, desc='i').save()
model(path='1343', depth=321, numchild=543, desc='e').save()
model(path='42', depth=1, numchild=1, desc='a').save()
model(path='43', depth=1000, numchild=0, desc='u').save()
model(path='44', depth=4, numchild=500, desc='o').save()
model(path='434', depth=321, numchild=543, desc='i').save()
model(path='4343', depth=321, numchild=543, desc='e').save()
model(path='41', depth=1, numchild=1, desc='a').save()
model(path='3', depth=221, numchild=322, desc='g').save()
model(path='1', depth=10, numchild=3, desc='b').save()
model(path='2', depth=10, numchild=3, desc='d').save()
def test_fix_tree_non_destructive(self):
for model in (MP_TestNodeShortPath, MP_TestSortedNodeShortPath):
self.add_broken_test_data(model)
model.fix_tree(destructive=False)
self.assertEqual(self.got(model), self.expected_with_holes[model])
model.find_problems()
def test_fix_tree_destructive(self):
for model in (MP_TestNodeShortPath, MP_TestSortedNodeShortPath):
self.add_broken_test_data(model)
model.fix_tree(destructive=True)
self.assertEqual(self.got(model), self.expected_no_holes[model])
model.find_problems()
class TestIssues(TestCase):
"test for http://code.google.com/p/django-treebeard/issues/detail?id=14"
def test_many_to_many_django_user_anonymous(self):
if not HAS_DJANGO_AUTH: # pragma: no cover
self.fail('this test needs django.contrib.auth in INSTALLED_APPS')
# Using AnonymousUser() in the querysets will expose non-treebeard
# related problems in Django 1.0
#
# Postgres:
# ProgrammingError: can't adapt
# SQLite:
# InterfaceError: Error binding parameter 4 - probably unsupported
# type.
# MySQL compared a string to an integer field:
# `treebeard_mp_testissue14_users`.`user_id` = 'AnonymousUser'
#
# Using a None field instead works (will be translated to IS NULL).
#
# anonuserobj = AnonymousUser()
anonuserobj = None
def qs_check(qs, expected):
self.assertEqual(
[o.name for o in qs],
expected)
user = User.objects.create_user('test_user', '[email protected]',
'testpasswd')
user.save()
root = MP_TestIssue14.add_root(name="the root node")
root.add_child(name="first")
second = root.add_child(name="second")
qs_check(root.get_children(), ['first', 'second'])
qs_check(root.get_children().filter(Q(name="first")), ['first'])
qs_check(root.get_children().filter(Q(users=user)), [])
qs_check(
root.get_children().filter(Q(name="first") | Q(users=user)),
['first'])
user = anonuserobj
qs_check(
root.get_children().filter(Q(name="first") | Q(users=user)),
['first', 'second'])
user = User.objects.get(username="test_user")
second.users.add(user)
qs_check(
root.get_children().filter(Q(name="first") | Q(users=user)),
['first', 'second'])
user = anonuserobj
qs_check(
root.get_children().filter(Q(name="first") | Q(users=user)),
['first'])
class TestModelAdmin(ModelAdmin):
form = MoveNodeForm
class TestMoveNodeForm(TestTreeBase):
tpl = (u'<tr><th><label for="id__position">Position:</label></th>'
'<td><select name="_position" id="id__position">\n'
'<option value="first-child">First child of</option>\n'
'<option value="left">Before</option>\n'
'<option value="right">After</option>\n'
'</select></td></tr>\n'
'<tr><th><label for="id__ref_node_id">Relative to:</label>'
'</th><td><select name="_ref_node_id" id="id__ref_node_id">\n'
'<option value="0">-- root --</option>\n')
def _multi_form_html_root_node(self):
self.model.load_bulk(BASE_DATA)
node = self.model.get_tree()[0]
form = MoveNodeForm(instance=node)
rtpl = self.tpl
self.assertEqual(['_position', '_ref_node_id'],
form.base_fields.keys())
for obj in self.model.get_tree():
if node != obj or obj.is_descendant_of(node):
rtpl += '<option value="%d">%sNode %d</option>\n' % (
obj.id, '. . ' * (obj.get_depth() - 1), obj.id)
rtpl += '</select></td></tr>'
formstr = unicode(form).replace(u' selected="selected"', u'')
self.assertEqual(rtpl, formstr)
def _multi_form_html_leaf_node(self):
self.model.load_bulk(BASE_DATA)
nodes = list(self.model.get_tree())
node = nodes[-1]
form = MoveNodeForm(instance=node)
rtpl = self.tpl
self.assertEqual(['_position', '_ref_node_id'],
form.base_fields.keys())
for obj in self.model.get_tree():
if node != obj or obj.is_descendant_of(node):
rtpl += '<option value="%d">%sNode %d</option>\n' % (
obj.id, '. . ' * (obj.get_depth() - 1), obj.id)
rtpl += '</select></td></tr>'
formstr = unicode(form).replace(u' selected="selected"', u'')
self.assertEqual(rtpl, formstr)
def _multi_admin_html(self):
tpl = ('<tr><th><label for="id_desc">Desc:</label>'
'</th><td><input id="id_desc" type="text" class="vTextField" '
'name="desc" maxlength="255" /></td></tr>\n'
'<tr><th><label for="id__position">Position:</label></th>'
'<td><select name="_position" id="id__position">\n'
'<option value="first-child">First child of</option>\n'
'<option value="left">Before</option>\n'
'<option value="right">After</option>\n'
'</select></td></tr>\n'
'<tr><th><label for="id__ref_node_id">Relative to:</label>'
'</th><td><select name="_ref_node_id" id="id__ref_node_id">\n'
'<option value="0">-- root --</option>\n'
'<option value="%d">Node %d</option>\n'
'<option value="%d">Node %d</option>\n'
'<option value="%d">. . Node %d</option>\n'
'<option value="%d">. . Node %d</option>\n'
'<option value="%d">. . Node %d</option>\n'
'<option value="%d">. . . . Node %d</option>\n'
'<option value="%d">. . Node %d</option>\n'
'<option value="%d">Node %d</option>\n'
'<option value="%d">Node %d</option>\n'
'<option value="%d">. . Node %d</option>\n'
'</select></td></tr>')
request = None
self.model.load_bulk(BASE_DATA)
for node in self.model.objects.all():
site = AdminSite()
ma = TestModelAdmin(self.model, site)
self.assertEqual(
['desc', '_position', '_ref_node_id'],
ma.get_form(request).base_fields.keys())
self.assertEqual(
[(None, {'fields': ['desc', '_position', '_ref_node_id']})],
ma.get_fieldsets(request))
self.assertEqual(
[(None, {'fields': ['desc', '_position', '_ref_node_id']})],
ma.get_fieldsets(request, node))
form = ma.get_form(request)()
ids = []
for obj in self.model.get_tree():
ids.extend([obj.id] * 2)
self.assertEqual(tpl % tuple(ids), unicode(form))
_load_test_methods(TestMoveNodeForm)
_load_test_methods(TestEmptyTree)
_load_test_methods(TestClassMethods)
_load_test_methods(TestSimpleNodeMethods)
_load_test_methods(TestAddChild)
_load_test_methods(TestAddSibling)
_load_test_methods(TestDelete)
_load_test_methods(TestMoveErrors)
_load_test_methods(TestMoveLeafRoot)
_load_test_methods(TestMoveLeaf)
_load_test_methods(TestMoveBranchRoot)
_load_test_methods(TestMoveBranch)
_load_test_methods(TestHelpers)
# we didn't create extra sorted-proxy models
_load_test_methods(TestMoveSortedErrors, proxy=False)
_load_test_methods(TestTreeSorted, proxy=False)
| Ernsting/django-treebeard | treebeard/tests/tests.py | Python | apache-2.0 | 71,903 | 0.000793 |
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2006-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
"""This module is used to parse a supervision graph Salome (XML) and convert it into
YACS calculation schema
This parsing is done with SalomeLoader class and its method load.
"""
import sys,os
try:
import cElementTree as ElementTree
except ImportError:
import ElementTree
#from sets import Set
Set=set
import graph
import pilot
import SALOMERuntime
class UnknownKind(Exception):pass
#global variables
debug=0
typeMap={}
objref=None
_containers={}
currentProc=None
def typeName(name):
"""Replace :: in type name by /"""
return "/".join(name.split("::"))
streamTypes={
'0':"Unknown",
'1':"CALCIUM_integer",
'3':"CALCIUM_real",
}
class SalomeLoader:
"""This class parses a Salome graph (version 3.2.x) and converts it into YACS schema.
     The loadxml method parses the XML file and returns a list of SalomeProc objects.
     The load method calls loadxml and creates a YACS object of class Proc.
"""
def loadxml(self,filename):
"""
        Parse an XML file from Salome SUPERV and return a list of SalomeProc objects.
"""
tree = ElementTree.ElementTree(file=filename)
root = tree.getroot()
if debug:print "root.tag:",root.tag,root
procs=[]
if root.tag == "dataflow":
#only one dataflow
dataflow=root
if debug:print dataflow
proc=SalomeProc(dataflow)
procs.append(proc)
else:
#one or more dataflows. The graph contains macros.
#All macros are defined at the same level in the XML file.
for dataflow in root.findall("dataflow"):
if debug:print dataflow
proc=SalomeProc(dataflow)
if debug:print "dataflow name:",proc.name
procs.append(proc)
return procs
def load(self,filename):
"""Parse a SUPERV XML file (method loadxml) and return a YACS Proc object.
"""
global typeMap,_containers,objref,currentProc
typeMap.clear()
objref=None
_containers.clear()
currentProc=None
procs=self.loadxml(filename)
#Split the master proc from the possible macros.
proc=procs.pop(0)
#proc.display()
#Put macros in macro_dict
macro_dict={}
for p in procs:
if debug:print "proc_name:",p.name,"coupled_node:",p.coupled_node
macro_dict[p.name]=p
if debug:print filename
yacsproc=ProcNode(proc,macro_dict,filename)
return yacsproc.createNode()
class Container:
"""Class that defines a Salome Container"""
def __init__(self,mach,name):
self.mach=mach
self.name=name
self.components={}
def getName(self):
return self.mach+"/"+self.name
def getContainer(name):
if not name:
name="localhost/FactoryServer"
elif "/" not in name:
#no machine name: use localhost
name="localhost/"+name
return _containers.get(name)
def addContainer(name):
if not name:
mach="localhost"
name="FactoryServer"
elif "/" not in name:
#no machine name: use localhost for mach
mach="localhost"
else:
mach,name=name.split("/")
c=Container(mach,name)
_containers[mach+"/"+name]=c
return c
class Service:
"""Class for Service properties"""
class Parameter:
"""Class for Parameter properties"""
class Link:
"""Class for Link properties"""
class Data:
"""Class for Data properties"""
class Node:
"""Base class for all nodes """
label="Node: "
def __init__(self):
self.links=[] # list to store inputs as links
# a link has two attributes : from_node, the starting node
# to_node, the end node
self.datas=[]
self.inStreamLinks=[] #list of dataStream links connected to this node (in)
self.outStreamLinks=[] #list of dataStream links connected to this node (out)
self.node=None
def createNode(self):
raise NotImplementedError
def getInputPort(self,p):
return self.node.getInputPort(".".join(p.split("__")))
def getOutputPort(self,p):
if not self.node:
self.createNode()
return self.node.getOutputPort(".".join(p.split("__")))
def getInputDataStreamPort(self,p):
return self.node.getInputDataStreamPort(p)
def getOutputDataStreamPort(self,p):
return self.node.getOutputDataStreamPort(p)
def initPort(self,l):
if l.type == 7:
#double (CORBA::tk_double)
try:
self.getInputPort(l.tonodeparam).edInitDbl(l.value)
except:
reason="Problem in initialization, not expected type (double): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 3:
#int (CORBA::tk_long)
try:
self.getInputPort(l.tonodeparam).edInitInt(l.value)
except:
reason="Problem in initialization, not expected type (int): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 14:
#objref (CORBA::tk_objref)
try:
self.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (objref): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 18:
#string (CORBA::tk_string)
try:
self.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (string): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
else:
reason="Problem in initialization, not expected type (%s): %s %s" % (l.type,l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
class InlineNode(Node):
"""Inline Node salome : python function in self.codes[0]"""
def __init__(self):
Node.__init__(self)
self.codes=[]
def createNode(self):
r = pilot.getRuntime()
if self.fnames[0] == "?":
n=r.createScriptNode("",self.name)
else:
n=r.createFuncNode("",self.name)
n.setFname(self.fnames[0])
n.setScript(self.codes[0])
self.node=n
for para in self.service.inParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
n.edAddInputPort(para.name,typeMap[para.type])
for para in self.service.outParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
n.edAddOutputPort(para.name,typeMap[para.type])
for d in self.datas:
self.initPort(d)
return n
class ComputeNode(Node):
"""Compute Node Salome execute a component service"""
def createNode(self):
if self.node:
return self.node
r = pilot.getRuntime()
if self.container.components.has_key(self.sComponent):
#a node for this component already exists
compo_node=self.container.components[self.sComponent]
#It's a node associated with another node of the same component instance
            #It is not certain that the YACS node has already been created
master_node=compo_node.createNode()
n=master_node.createNode(self.name)
else:
            #there is no node yet for this component; this node is the first one
self.container.components[self.sComponent]=self
#There is no component instance for this node
n=r.createCompoNode("",self.name)
n.setRef(self.sComponent)
n.setMethod(self.service.name)
self.node=n
#set the container for the node
if self.container:
n.getComponent().setContainer(currentProc.containerMap[self.container.getName()])
#add dataflow ports in out
for para in self.service.inParameters:
if not typeMap.has_key(para.type):
                #Create the missing type and add it to the type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
n.edAddInputPort(para.name,typeMap[para.type])
for para in self.service.outParameters:
if not typeMap.has_key(para.type):
                #Create the missing type and add it to the type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
pout=n.edAddOutputPort(para.name,typeMap[para.type])
#add datastream ports in and out
for para in self.inStreams:
if debug:print para.name,para.type,para.dependency,para.schema, para.interpolation,
if debug:print para.extrapolation
pin=n.edAddInputDataStreamPort(para.name,typeMap[streamTypes[para.type]])
for para in self.outStreams:
if debug:print para.name,para.type,para.dependency,para.values
pout=n.edAddOutputDataStreamPort(para.name,typeMap[streamTypes[para.type]])
for d in self.datas:
self.initPort(d)
return n
class ComposedNode(Node):
"""Composed Node Salome (base class)"""
def reduceLoop(self):
"""Transform a Salome graph with loops on one level
in a hierarchical graph.
The initial graph is in self.G. It is transformed in place.
"""
G=self.G
if debug:graph.display(G)
#invert the graph
I=graph.invert(G)
#graph.display(I)
#Get all loops and their internal nodes
loops={}
for n in G:
if n.kind == 4:
#Beginning of loop
loops[n]=graph.reachable(G,n)&graph.reachable(I,n.endloop)
n.inner_nodes=loops[n]
n.G=graph.InducedSubgraph(loops[n],G)
if debug:print "all loops"
if debug:print loops
#Get most external loops
outer_loops=loops.keys()
for l in loops:
for ll in outer_loops:
if loops[l] < loops[ll]:
#internal loop
outer_loops.remove(l)
ll.set_inner(l)
break
#In the end all remaining loops in outer_loops are the most external
if debug:print outer_loops
#We remove all internal nodes of most external loops
for l in outer_loops:
#Remove internal nodes
for n in loops[l]:
del G[n]
#Remove endloop node
suiv=G[l.endloop]
del G[l.endloop]
#Replace neighbours of loop by those of endloop
G[l]= suiv
            #Try to transform incoming and outgoing links of endloop into incoming and
            #outgoing links of internal nodes. Probably not complete.
inputs={}
for link in l.endloop.links:
if debug:print link.from_node,link.to_node,link.from_param,link.to_param
inputs[link.to_param]=link.from_node,link.from_param
for s in suiv:
for link in s.links:
if link.from_node == l.endloop:
link.from_node,link.from_param=inputs[link.from_param]
if debug:print link.from_node,link.to_node,link.from_param,link.to_param
if debug:graph.display(G)
#Apply the reduction treatment to most external loops (recurse)
for l in outer_loops:
l.reduceLoop()
def connect_macros(self,macro_dict):
"""This method connects the salome macros in macro_dict to the master YACS Proc.
"""
if debug:print "connect_macros",self.node,macro_dict
for node in self.G:
if isinstance(node,MacroNode):
#node is a macro, connect its definition to self.
#p is the Salome macro (class SalomeProc)
#node is the Salome MacroNode that has the subgraph p
#node.node is the YACS Bloc equivalent to node
p=macro_dict[node.coupled_node]
bloc=node.node
if debug:print "macronode:",node.name,node.coupled_node,p
#Create a hierarchical graph from the salome graph
G=p.create_graph()
node.G=G
for n in G:
#create an equivalent YACS node from each salome node
nod=n.createNode()
bloc.edAddChild(nod)
#Connect macros to node
node.connect_macros(macro_dict)
#add control links
for n in G:
for v in G[n]:
bloc.edAddCFLink(n.node,v.node)
#add dataflow links and initializations
for n in G:
#dataflow links
for l in n.links:
bloc.edAddLink(l.from_node.getOutputPort(l.from_param),
l.to_node.getInputPort(l.to_param))
#datastream links
for l in n.outStreamLinks:
pout=l.from_node.getOutputDataStreamPort(l.from_param)
pin=l.to_node.getInputDataStreamPort(l.to_param)
bloc.edAddLink(pout,pin)
#initializations
for l in n.datas:
if l.type == 7:
#double (CORBA::tk_double)
try:
n.getInputPort(l.tonodeparam).edInitDbl(l.value)
except:
reason="Problem in initialization, not expected type (double): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 3:
#int (CORBA::tk_long)
try:
n.getInputPort(l.tonodeparam).edInitInt(l.value)
except:
reason="Problem in initialization, not expected type (int): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 14:
#objref (CORBA::tk_objref)
try:
n.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (objref): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 18:
#string (CORBA::tk_string)
try:
n.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (string): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
else:
reason="Problem in initialization, not expected type (%s): %s %s" % (l.type,l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
class LoopNode(ComposedNode):
"""Objet qui simule le comportement d'une boucle Salome."""
def __init__(self):
ComposedNode.__init__(self)
self.inner_loops=[]
#inner_nodes contains internal nodes as in Salome (on one level with endloop nodes)
self.inner_nodes=[]
def set_node(self,node):
self.node=node
def set_inner(self,loop):
for i in self.inner_loops:
if loop.inner_nodes < i.inner_nodes:
#the loop is contained in i
i.set_inner(loop)
break
self.inner_loops.append(loop)
def createNode(self):
"""Create the equivalent YACS loop and store it in attribute node
        A Salome loop has n input ports and output ports with exactly the same names.
        The head of the loop has 3 functions: init, next and more, which have almost the same
        interface. init and next have the same interface: on input, the loop input parameters;
        on output, the loop output parameters (same as input). more has one extra output parameter
        in first position. This parameter says whether the loop must go on or not.
The endloop has a function with the same interface as next.
To transform this node, create a YACS Bloc. In this bloc put a node for the init function
and a While node. In the while put all internal nodes plus 2 nodes for the next and more
functions.
"""
r = pilot.getRuntime()
bloop=r.createBloc(self.name)
#init node
init=r.createFuncNode("","init")
#print self.codes[0]
init.setScript(self.codes[0])
init.setFname(self.fnames[0])
for para in self.service.inParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
init.edAddInputPort(para.name,typeMap[para.type])
for para in self.service.outParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
init.edAddOutputPort(para.name,typeMap[para.type])
bloop.edAddChild(init)
self.init=init
wh=r.createWhileLoop(self.name)
bloop.edAddChild(wh)
blnode=r.createBloc(self.name)
wh.edSetNode(blnode)
cport=wh.edGetConditionPort()
cport.edInitBool(True)
#next node
next=r.createFuncNode("","next")
#print self.codes[2]
next.setScript(self.codes[2])
next.setFname(self.fnames[2])
for para in self.service.inParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
next.edAddInputPort(para.name,typeMap[para.type])
for para in self.service.outParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
next.edAddOutputPort(para.name,typeMap[para.type])
blnode.edAddChild(next)
self.next=next
#more node
more=r.createFuncNode("","more")
#print self.codes[1]
more.setScript(self.codes[1])
more.setFname(self.fnames[1])
for para in self.service.inParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
more.edAddInputPort(para.name,typeMap[para.type])
more.edAddOutputPort("DoLoop",typeMap["int"])
for para in self.service.outParameters:
if not typeMap.has_key(para.type):
#create the missing type and add it in type map
typeMap[para.type]= currentProc.createInterfaceTc("",para.type,[objref])
if not currentProc.typeMap.has_key(para.type):
currentProc.typeMap[para.type]=typeMap[para.type]
more.edAddOutputPort(para.name,typeMap[para.type])
blnode.edAddChild(more)
self.more=more
for para in self.service.outParameters:
bloop.edAddDFLink(init.getOutputPort(para.name),next.getInputPort(para.name))
for para in self.service.outParameters:
blnode.edAddDFLink(next.getOutputPort(para.name),more.getInputPort(para.name))
wh.edAddLink(more.getOutputPort("DoLoop"),wh.getInputPort("condition"))
for para in self.service.outParameters:
wh.edAddLink(more.getOutputPort(para.name),next.getInputPort(para.name))
self.node=bloop
for n in self.G:
node=n.createNode()
blnode.edAddChild(node)
for n in self.G:
for v in self.G[n]:
blnode.edAddCFLink(n.node,v.node)
for n in self.G:
for l in n.links:
try:
blnode.edAddDFLink(l.from_node.getOutputPort(l.from_param),
l.to_node.getInputPort(l.to_param))
except:
reason="Error while connecting output port: "+l.from_param+" from node: "+l.from_node.name
reason=reason+" to input port: "+l.to_param+" from node: "+l.to_node.name
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
return bloop
def getInputPort(self,p):
return self.init.getInputPort(p)
def getOutputPort(self,p):
return self.more.getOutputPort(p)
class Bloc(ComposedNode):
""" Composed node containing a set of connected nodes
"""
label="Bloc: "
def __init__(self):
Node.__init__(self)
self.nodes=[]
def addLink(self,node1,node2):
if node1 not in self.nodes:self.nodes.append(node1)
if node2 not in self.nodes:self.nodes.append(node2)
class MacroNode(Bloc):
"""Objet that represents a Salome Macro
"""
def createNode(self):
"""Create a YACS node (Bloc) equivalent to a Salome Macro """
r = pilot.getRuntime()
macro=r.createBloc(self.name)
self.node=macro
return macro
def is_loop(n):
"""Return true if n is a head loop node"""
return isinstance(n,LoopNode)
class ProcNode(ComposedNode):
"""Salome proc with its macros
The Salome proc is stored in attribute proc
The Salome macros are stored in attribute macro_dict ({})
"""
def __init__(self,proc,macro_dict,filename):
ComposedNode.__init__(self)
self.proc=proc
self.macro_dict=macro_dict
self.filename=filename
def createNode(self):
"""Create the YACS node (Proc) equivalent a Salome proc"""
global currentProc,objref
r = pilot.getRuntime()
#create_graph gives a hierarchical graph equivalent to the Salome proc
G=self.proc.create_graph()
self.G=G
#Create the YACS proc with its elements (types, nodes, containers)
p=r.createProc("pr")
self.node=p
currentProc=p
p.filename=self.filename
typeMap["double"]=p.typeMap["double"]
typeMap["float"]=p.typeMap["double"]
typeMap["int"]=p.typeMap["int"]
typeMap["short"]=p.typeMap["int"]
typeMap["long"]=p.typeMap["int"]
typeMap["string"]=p.typeMap["string"]
typeMap["char"]=p.typeMap["string"]
typeMap["boolean"]=p.typeMap["bool"]
typeMap["bool"]=p.typeMap["bool"]
objref=p.createInterfaceTc("IDL:omg.org/CORBA/Object:1.0","Object",[])
typeMap["objref"]=objref
typeMap["Unknown"]=p.createInterfaceTc("","Unknown",[])
typeMap["GEOM_Object"]=p.createInterfaceTc("IDL:GEOM/GEOM_Object:1.0","GEOM_Object",[objref])
typeMap["GEOM_Shape"]=typeMap["GEOM_Object"]
typeMap["CALCIUM_integer"]=p.createInterfaceTc("IDL:Ports/Calcium_Ports/Calcium_Integer_Port:1.0","CALCIUM_integer",[])
typeMap["CALCIUM_real"]=p.createInterfaceTc("IDL:Ports/Calcium_Ports/Calcium_Real_Port:1.0","CALCIUM_real",[])
typeMap["CALCIUM_double"]=p.createInterfaceTc("IDL:Ports/Calcium_Ports/Calcium_Double_Port:1.0","CALCIUM_double",[])
typeMap["CALCIUM_string"]=p.createInterfaceTc("IDL:Ports/Calcium_Ports/Calcium_String_Port:1.0","CALCIUM_string",[])
typeMap["CALCIUM_boolean"]=p.createInterfaceTc("IDL:Ports/Calcium_Ports/Calcium_Logical_Port:1.0","CALCIUM_boolean",[])
typeMap["SuperVisionTest::Adder"]=p.createInterfaceTc("","SuperVisionTest/Adder",[objref])
typeMap["Adder"]=typeMap["SuperVisionTest::Adder"]
currentProc.typeMap["Object"]=typeMap["objref"]
currentProc.typeMap["Unknown"]=typeMap["Unknown"]
currentProc.typeMap["GEOM_Object"]=typeMap["GEOM_Object"]
currentProc.typeMap["GEOM_Shape"]=typeMap["GEOM_Shape"]
currentProc.typeMap["CALCIUM_integer"]=typeMap["CALCIUM_integer"]
currentProc.typeMap["CALCIUM_real"]=typeMap["CALCIUM_real"]
#create all containers
for name,container in _containers.items():
cont=r.createContainer()
cont.setName(name)
cont.setProperty("hostname",container.mach)
cont.setProperty("container_name",container.name)
currentProc.containerMap[name]=cont
for n in G:
#each node in G creates an equivalent YACS node.
node=n.createNode()
p.edAddChild(node)
#Connect Salome macros to nodes of proc p.
self.connect_macros(self.macro_dict)
#add control links
for n in G:
for v in G[n]:
p.edAddCFLink(n.node,v.node)
#add dataflow links and initializations
for n in G:
#dataflow links
for l in n.links:
try:
p.edAddLink(l.from_node.getOutputPort(l.from_param),
l.to_node.getInputPort(l.to_param))
except:
reason="Error while connecting output port: "+l.from_param+" from node: "+l.from_node.name
reason=reason+" to input port: "+l.to_param+" from node: "+l.to_node.name
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
#datastream links
for l in n.outStreamLinks:
pout=l.from_node.getOutputDataStreamPort(l.from_param)
pin=l.to_node.getInputDataStreamPort(l.to_param)
p.edAddLink(pout,pin)
#initializations
for l in n.datas:
if l.type == 7:
#double (CORBA::tk_double)
try:
n.getInputPort(l.tonodeparam).edInitDbl(l.value)
except:
reason="Problem in initialization, not expected type (double): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 3:
#int (CORBA::tk_long)
port=n.getInputPort(l.tonodeparam)
try:
port.edInitInt(l.value)
except:
reason="Problem in initialization, not expected type (int): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 14:
#objref (CORBA::tk_objref)
try:
n.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (objref): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
elif l.type == 18:
#string (CORBA::tk_string)
try:
n.getInputPort(l.tonodeparam).edInitString(l.value)
except:
reason="Problem in initialization, not expected type (string): %s %s" % (l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
else:
reason="Problem in initialization, not expected type (%s): %s %s" % (l.type,l.tonodeparam,l.value)
currentProc.getLogger("parser").error(reason,currentProc.filename,-1)
return p
class SalomeProc(ComposedNode):
"""Salome proc with all its dataflow, datastream and control links
The object is built by parsing an XML file.
"""
def __init__(self,dataflow):
self.name="name"
self.parse(dataflow)
#self.links : list of dataflow links (Link objects)
#self.nodes : list of graph nodes
#self.node_dict : nodes dict ({name:node})
#self.datas : list of graph datas
#each node has 2 lists of datastream links (inStreams, outStreams)
def parse(self,dataflow):
if debug:print "All XML nodes"
for node in dataflow:
if debug:print node.tag,node
#Parse dataflow info-list
self.dataflow_info=self.parseService(dataflow.find("info-list/node/service"))
if debug:print self.dataflow_info
if debug:print self.dataflow_info.inParameters
if debug:print self.dataflow_info.outParameters
if debug:
for para in self.dataflow_info.inParameters:
print "inParam:",para.name,para.name.split("__",1)
self.name=dataflow.findtext("info-list/node/node-name")
self.coupled_node=dataflow.findtext("info-list/node/coupled-node")
if debug:print "All XML nodes dataflow/node-list"
nodes=[]
node_dict={}
#Parse all nodes
for n in dataflow.findall('node-list/node'):
#n is a node-list node
kind=n.findtext("kind")
comp=n.findtext("component-name")
name=n.findtext("node-name")
coupled_node=n.findtext("coupled-node")
interface=n.findtext("interface-name")
container=n.findtext("container")
#kind=1 : dataflow ?
#kind=2 : ?
#kind=9 : datastream graph ?
#kind=6 : ??
#kind=8 : ??
if kind == "0":
#It's a service
node=ComputeNode()
node.kind=0
node.sComponent = comp
node.interface=interface
node.container= getContainer(container)
if not node.container:
node.container=addContainer(container)
if debug:print "\tcontainer",node.container
elif kind == "3":
#It's a python function
node=InlineNode()
node.kind=3
codes=[]
fnames=[]
for pyfunc in n.findall("PyFunction-list/PyFunction"):
fnames.append(pyfunc.findtext("FuncName"))
codes.append(self.parsePyFunction(pyfunc))
node.fnames=fnames
node.codes=codes
elif kind == "4":
#It's a loop : make a LoopNode
#python functions (next, more, init) are found in codes
node=LoopNode()
node.kind=4
codes=[]
fnames=[]
for pyfunc in n.findall("PyFunction-list/PyFunction"):
fnames.append(pyfunc.findtext("FuncName"))
codes.append(self.parsePyFunction(pyfunc))
node.fnames=fnames
node.codes=codes
elif kind == "5":
#End of loop : make an InlineNode
node=InlineNode()
node.kind=5
codes=[]
fnames=[]
for pyfunc in n.findall("PyFunction-list/PyFunction"):
fnames.append(pyfunc.findtext("FuncName"))
codes.append(self.parsePyFunction(pyfunc))
node.fnames=fnames
node.codes=codes
elif kind == "10":
# It's a Macro node : make a MacroNode
node=MacroNode()
node.kind=10
else:
raise UnknownKind,kind
node.name=name
node.service=None
node.coupled_node=coupled_node
#Put nodes in a dict to ease search
node_dict[node.name]=node
if debug:print "\tnode-name",node.name
if debug:print "\tkind",node.kind,node.__class__.__name__
s=n.find("service")
if s:
node.service=self.parseService(s)
#Parse datastream ports
if debug:print "DataStream ports"
inStreams=[]
for indata in n.findall("DataStream-list/inParameter"):
inStreams.append(self.parseInData(indata))
node.inStreams=inStreams
outStreams=[]
outStreams_dict={}
for outdata in n.findall("DataStream-list/outParameter"):
p=self.parseOutData(outdata)
outStreams.append(p)
outStreams_dict[p.name]=p
node.outStreams=outStreams
node.outStreams_dict=outStreams_dict
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
nodes.append(node)
self.nodes=nodes
self.node_dict=node_dict
#Nodes parsing is finished.
#Parse dataflow and datastream links.
"""
<link>
<fromnode-name>Node_A_1</fromnode-name>
<fromserviceparameter-name>a_1</fromserviceparameter-name>
<tonode-name>Node_B_1</tonode-name>
<toserviceparameter-name>b_1</toserviceparameter-name>
<coord-list/>
</link>
"""
if debug:print "All XML nodes dataflow/link-list"
links=[]
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
for link in dataflow.findall('link-list/link'):
l=Link()
l.from_name=link.findtext("fromnode-name")
l.to_name=link.findtext("tonode-name")
l.from_param=link.findtext("fromserviceparameter-name")
l.to_param=link.findtext("toserviceparameter-name")
links.append(l)
if debug:print "\tfromnode-name",l.from_name
if debug:print "\tfromserviceparameter-name",l.from_param
if debug:print "\ttonode-name",l.to_name
if debug:print "\ttoserviceparameter-name",l.to_param
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
self.links=links
if debug:print "All XML nodes dataflow/data-list"
datas=[]
for data in dataflow.findall('data-list/data'):
d=self.parseData(data)
datas.append(d)
if debug:print "\ttonode-name",d.tonode
if debug:print "\ttoserviceparameter-name",d.tonodeparam
if debug:print "\tparameter-value",d.value
if debug:print "\tparameter-type",d.type
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
self.datas=datas
def parseService(self,s):
service=Service()
service.name=s.findtext("service-name")
if debug:print "\tservice-name",service.name
inParameters=[]
for inParam in s.findall("inParameter-list/inParameter"):
p=Parameter()
p.name=inParam.findtext("inParameter-name")
p.type=typeName(inParam.findtext("inParameter-type"))
if debug:print "\tinParameter-name",p.name
if debug:print "\tinParameter-type",p.type
inParameters.append(p)
service.inParameters=inParameters
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
outParameters=[]
for outParam in s.findall("outParameter-list/outParameter"):
p=Parameter()
p.name=outParam.findtext("outParameter-name")
p.type=typeName(outParam.findtext("outParameter-type"))
if debug:print "\toutParameter-name",p.name
if debug:print "\toutParameter-type",p.type
outParameters.append(p)
service.outParameters=outParameters
if debug:print "\t++++++++++++++++++++++++++++++++++++++++++++"
return service
def parseData(self,d):
da=Data()
da.tonode=d.findtext("tonode-name")
da.tonodeparam=d.findtext("toserviceparameter-name")
da.value=d.findtext("data-value/value")
da.type=eval(d.findtext("data-value/value-type"))
if da.type < 9:
da.value=eval(da.value)
return da
def parsePyFunction(self,pyfunc):
if debug:print pyfunc.tag,":",pyfunc
if debug:print "\tFuncName",pyfunc.findtext("FuncName")
text=""
for cdata in pyfunc.findall("PyFunc"):
if text:text=text+'\n'
if cdata.text != '?':
text=text+ cdata.text
return text
"""<inParameter-type>1</inParameter-type>
<inParameter-name>istream</inParameter-name>
<inParameter-dependency>2</inParameter-dependency>
<inParameter-schema>0</inParameter-schema>
<inParameter-interpolation>0</inParameter-interpolation>
<inParameter-extrapolation>0</inParameter-extrapolation>
</inParameter>
<outParameter>
<outParameter-type>1</outParameter-type>
<outParameter-name>ostream</outParameter-name>
<outParameter-dependency>2</outParameter-dependency>
<outParameter-values>0</outParameter-values>
</outParameter>
"""
def parseInData(self,d):
if debug:print d.tag,":",d
p=Parameter()
p.name=d.findtext("inParameter-name")
p.type=typeName(d.findtext("inParameter-type"))
p.dependency=d.findtext("inParameter-dependency")
p.schema=d.findtext("inParameter-schema")
p.interpolation=d.findtext("inParameter-interpolation")
p.extrapolation=d.findtext("inParameter-extrapolation")
if debug:print "\tinParameter-name",p.name
return p
def parseOutData(self,d):
if debug:print d.tag,":",d
p=Parameter()
p.name=d.findtext("outParameter-name")
p.type=typeName(d.findtext("outParameter-type"))
p.dependency=d.findtext("outParameter-dependency")
p.values=d.findtext("outParameter-values")
if debug:print "\toutParameter-name",p.name
return p
def create_graph(self):
#a graph is a dict {node:neighbours}
#neighbours is a Set of neighbour nodes (of course)
#for v in graph (python >= 2.3): iterate through graph nodes
#for v in graph[node] iterate through node neighbours
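        #   e.g. G = {nodeA: set([nodeB]), nodeB: set()} expresses a single
        #   dependency nodeA -> nodeB (nodeB has no successor).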
G={}
#create all nodes without neighbours
for n in self.nodes:
G[n]=Set()
#calculate neighbours with links
for link in self.links:
from_node=self.node_dict[link.from_name]
if link.from_param == "Gate" or link.to_param == "Gate":
                #Salome control link: add the to_name node to the neighbours
if debug:print "add control link",link.from_name,link.to_name
G[self.node_dict[link.from_name]].add(self.node_dict[link.to_name])
elif from_node.outStreams_dict.has_key(link.from_param):
# datastream link :
# 1- add link in link list
# 2- add in link references on from_node and to_node
if debug:print "add stream link",link.from_name,link.to_name
self.node_dict[link.to_name].inStreamLinks.append(link)
self.node_dict[link.from_name].outStreamLinks.append(link)
link.from_node=self.node_dict[link.from_name]
link.to_node=self.node_dict[link.to_name]
else:
# other salome link
                # if the link goes from the EndOfLoop node back to the Loop head node, we ignore it
# all others are kept
from_node=self.node_dict[link.from_name]
to_node=self.node_dict[link.to_name]
if isinstance(to_node,LoopNode):
                    # If it's the link from EndOfLoop to Loop, we ignore it
if to_node.coupled_node == from_node.name:
if debug:print "backlink loop:",from_node,to_node
#ignored
continue
if debug:print "add dataflow link",link.from_name,link.to_name
G[self.node_dict[link.from_name]].add(self.node_dict[link.to_name])
if link.from_param != "DoLoop" and link.to_param != "DoLoop":
#Links on DoLoop are used by Salome supervisor. We ignore them.
#Add in the link references on nodes (from_node and to_node)
#Add this link into the list of links of to_node node.
self.node_dict[link.to_name].links.append(link)
link.from_node=self.node_dict[link.from_name]
link.to_node=self.node_dict[link.to_name]
#In a Salome graph with loops, head node and end node are connected
#with 2 opposite links
#Store the endloop in attribute endloop of head node.
if link.from_param == "DoLoop" and link.to_param == "DoLoop" \
and is_loop(self.node_dict[link.from_name]) \
and isinstance(self.node_dict[link.to_name],InlineNode):
#Store the end loop inline node in attribute endloop
#self.node_dict[link.to_name] is the end node of the head loop node self.node_dict[link.from_name]
if debug:print "add loop",link.from_name,link.to_name
self.node_dict[link.from_name].endloop=self.node_dict[link.to_name]
self.node_dict[link.to_name].loop=self.node_dict[link.from_name]
for data in self.datas:
if debug:print "datas",data
self.node_dict[data.tonode].datas.append(data)
self.G=G
#Transform the graph in place
        # Transform one-level loops into a hierarchical graph
self.reduceLoop()
        #Return the hierarchical graph that can be transformed into YACS objects.
return G
def display(self,suivi="sync"):
"""Display Salome proc with graphviz (dot file)"""
#to display : dot -Tpng salome.dot |display
f=file("salome.dot", 'w')
self.write_dot(f)
f.close()
cmd="dot -Tpng salome.dot |display" + (suivi == "async" and "&" or "")
os.system(cmd)
def write_dot(self,stream):
"""Dump Salome proc into stream with dot format"""
stream.write('digraph %s {\nnode [ style="filled" ]\n' % self.name)
for node in self.nodes:
label = "%s:%s"% (node.name,node.__class__.__name__)
color='green'
stream.write(' %s [fillcolor="%s" label=< %s >];\n' % (
id(node), color, label
))
for link in self.links:
from_node=self.node_dict[link.from_name]
to_node=self.node_dict[link.to_name]
stream.write(' %s -> %s;\n' % (id(from_node), id(to_node)))
stream.write("}\n")
def main():
import traceback
usage ="""Usage: %s salomeFile convertedFile
where salomeFile is the name of the input schema file (old Salome syntax)
and convertedFile is the name of the output schema file (new YACS syntax)
"""
try:
salomeFile=sys.argv[1]
convertedFile=sys.argv[2]
except :
print usage%(sys.argv[0])
sys.exit(3)
SALOMERuntime.RuntimeSALOME_setRuntime()
loader=SalomeLoader()
try:
p= loader.load(salomeFile)
s= pilot.SchemaSave(p)
s.save(convertedFile)
except:
traceback.print_exc(file=sys.stdout)
f=open(convertedFile,'w')
f.write("<proc></proc>\n")
sys.exit(2)
logger=p.getLogger("parser")
if not logger.isEmpty():
print logger.getStr()
sys.exit(1)
if __name__ == "__main__":
main()
| FedoraScientific/salome-yacs | src/salomeloader/salomeloader.py | Python | gpl-2.0 | 43,462 | 0.030578 |
#!/usr/bin/env python3
# for more info, see github.com/qguv/loadaverage
from load.loadaverage import main
| qguv/loadaverage | load/__init__.py | Python | gpl-3.0 | 107 | 0 |
from tokens.andd import And
from tokens.expression import Expression
from tokens.iff import Iff
from tokens.kfalse import ConstantFalse
from tokens.ktrue import ConstantTrue
from tokens.nop import Not
from tokens.orr import Or
from tokens.then import Then
from tokens.variable import Variable
class TokenParser:
"""This parser only works with atomic expressions,
    so parentheses are needed everywhere to group items"""
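    # Minimal usage sketch: operator spellings such as "->" come from each token
    # class's `representations` attribute (see the comment in parse_expression);
    # the formula itself is illustrative.
    #   e = TokenParser.parse_expression("((p) -> (q))")
    #   # e is an Expression tree built from Variable/Then/... tokens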
@staticmethod
def parse_expression(string):
        # Separate parentheses so they become their own tokens
# Also convert [ or { to the same parenthesis (
for s in '([{':
string = string.replace(s, ' ( ')
for s in ')]}':
string = string.replace(s, ' ) ')
# Get all operators so we can iterate over them
#
# Note that the order here is important. We first need to replace long
# expressions, such as '<->' with their single character representations.
#
# If we didn't do this, after we tried to separate the tokens from other
# expressions by adding spaces on both sides of the operator, '->' would
# break '<->' turning it into '< ->', which would not be recognised.
#
# We add spaces between the tokens so it's easy to split them and identify them.
# Another way would be to iterate over the string and finding the tokens. Once
# identified, they'd be put, in order, on a different list. However, this is
# not as simple as the currently used approach.
operators = [Iff, Then, Not, Or, And, ConstantTrue, ConstantFalse]
# Find all the representations on the string and add surrounding spaces,
# this will allow us to call 'string.split()' to separate variable names
# from the operators so the user doesn't need to enter them separated
for operator in operators:
for representation in operator.representations:
string = string.replace(representation, ' '+operator.single_char_representation+' ')
# Get all the tokens
words = string.split()
# Store the found nested expressions on the stack
expressions_stack = [Expression()]
for w in words:
done = False
for operator in operators:
# We replaced all the operator with their single character representations. We
# don't need to check whether the current word (representation) is any of the
# available representations for this operator, since it's the single-character one.
if w == operator.single_char_representation:
expressions_stack[-1].add_token(operator())
done = True
break
if done:
pass
elif w == '(':
expressions_stack.append(Expression())
elif w == ')':
e = expressions_stack.pop()
expressions_stack[-1].add_token(e)
else:
expressions_stack[-1].add_token(Variable(w))
# Tokenize the top expression (this will also tokenize its children)
expressions_stack[0].tokenize()
# Return the top expression once it's completely valid
return expressions_stack[0]
| LonamiWebs/Py-Utils | logicmind/token_parser.py | Python | mit | 3,318 | 0.003617 |
from node import models
from django.forms import ModelForm
from . import cdmsportalfunc as cpf
from django.core.exceptions import ValidationError
from django import forms
class MoleculeForm(ModelForm):
class Meta:
model = models.Molecules
fields = '__all__'
class SpecieForm(ModelForm):
datearchived = forms.DateField(
widget=forms.TextInput(attrs={'readonly': 'readonly'})
)
dateactivated = forms.DateField(
widget=forms.TextInput(attrs={'readonly': 'readonly'})
)
class Meta:
model = models.Species
fields = '__all__'
class FilterForm(ModelForm):
class Meta:
model = models.QuantumNumbersFilter
fields = '__all__'
class XsamsConversionForm(forms.Form):
inurl = forms.URLField(
label='Input URL',
required=False,
widget=forms.TextInput(
attrs={'size': 50,
'title': 'Paste here a URL that delivers an XSAMS '
'document.',
}))
    infile = forms.FileField(required=False)
format = forms.ChoiceField(
choices=[("RAD 3D", "RAD 3D"), ("CSV", "CSV")], )
def clean(self):
infile = self.cleaned_data.get('infile')
inurl = self.cleaned_data.get('inurl')
if (infile and inurl):
raise ValidationError('Give either input file or URL!')
if inurl:
try:
data = cpf.urlopen(inurl)
except Exception as err:
raise ValidationError('Could not open given URL: %s' % err)
elif infile:
data = infile
else:
raise ValidationError('Give either input file or URL!')
try:
self.cleaned_data['result'] = cpf.applyStylesheet2File(data)
except Exception as err:
raise ValidationError('Could not transform XML file: %s' % err)
return self.cleaned_data
| cpe/VAMDC-VALD | nodes/cdms/node/forms.py | Python | gpl-3.0 | 1,963 | 0 |
'''
- Leetcode problem: 675
- Difficulty: Hard
- Brief problem description:
You are asked to cut off trees in a forest for a golf event. The forest is represented as a non-negative 2D map, in this map:
0 represents the obstacle can't be reached.
1 represents the ground can be walked through.
The place with number bigger than 1 represents a tree can be walked through, and this positive number represents the tree's height.
In one step you can walk in any of the four directions top, bottom, left and right also when standing in a point which is a tree you can decide whether or not to cut off the tree.
You are asked to cut off all the trees in this forest in the order of tree's height - always cut off the tree with lowest height first. And after cutting, the original place has the tree will become a grass (value 1).
You will start from the point (0, 0) and you should output the minimum steps you need to walk to cut off all the trees. If you can't cut off all the trees, output -1 in that situation.
You are guaranteed that no two trees have the same height and there is at least one tree needs to be cut off.
Example 1:
Input:
[
[1,2,3],
[0,0,4],
[7,6,5]
]
Output: 6
Example 2:
Input:
[
[1,2,3],
[0,0,0],
[7,6,5]
]
Output: -1
Example 3:
Input:
[
[2,3,4],
[0,0,5],
[8,7,6]
]
Output: 6
Explanation: You started from the point (0,0) and you can cut off the tree in (0,0) directly without walking.
Constraints:
1 <= forest.length <= 50
1 <= forest[i].length <= 50
0 <= forest[i][j] <= 10^9
- Solution Summary:
1. Sort the trees by tree height
2. Calculate the shortest path by BFS
Time Complexity: O((RC)^2)
Space Complexity: O(R*C)
- Used Resources:
--- Bo Zhou
'''
from collections import deque
from typing import List


class Solution:
def cutOffTree(self, forest: List[List[int]]) -> int:
treeList = []
for i in range(len(forest)):
for j in range(len(forest[0])):
if forest[i][j] > 1:
treeList.append((forest[i][j], (i, j)))
treeList.sort(key=lambda x: x[0])
totalDist = 0
startPoint = (0, 0)
for tree in treeList:
dist = self.shortestPath(forest, startPoint[0], startPoint[1], tree[1][0], tree[1][1])
if dist == -1:
return -1
else:
totalDist += dist
startPoint = (tree[1][0], tree[1][1])
return totalDist
def shortestPath(self, forest, sx, sy, tx, ty) -> int:
if sx == tx and sy == ty:
return 0
directs = [(0, 1), (0, -1), (1, 0), (-1, 0)]
visited = set()
dq = deque()
dq.append((sx, sy))
step = 0
while dq:
n = len(dq)
step += 1
for i in range(n):
x, y = dq.popleft()
for dx, dy in directs:
newX = x + dx
newY = y + dy
if newX == tx and newY == ty:
return step
elif 0 <= newX < len(forest) and 0 <= newY < len(forest[0]) and (newX, newY) not in visited and \
forest[newX][newY] != 0:
visited.add((newX, newY))
dq.append((newX, newY))
return -1
| bzhou26/leetcode_sol | p675_Cut_Off_Trees_for_Golf_Event.py | Python | mit | 3,256 | 0.002764 |
"""
Tests for game_theory/random.py
"""
import numpy as np
from numpy.testing import assert_allclose, assert_raises
from nose.tools import eq_, ok_
from quantecon.game_theory import (
random_game, covariance_game, random_pure_actions, random_mixed_actions
)
def test_random_game():
nums_actions = (2, 3, 4)
g = random_game(nums_actions)
eq_(g.nums_actions, nums_actions)
def test_covariance_game():
nums_actions = (2, 3, 4)
N = len(nums_actions)
rho = 0.5
g = covariance_game(nums_actions, rho=rho)
eq_(g.nums_actions, nums_actions)
rho = 1
g = covariance_game(nums_actions, rho=rho)
for a in np.ndindex(*nums_actions):
for i in range(N-1):
payoff_profile = g.payoff_profile_array[a]
assert_allclose(payoff_profile[i], payoff_profile[-1], atol=1e-8)
rho = -1 / (N - 1)
g = covariance_game(nums_actions, rho=rho)
for a in np.ndindex(*nums_actions):
assert_allclose(g.payoff_profile_array.sum(axis=-1),
np.zeros(nums_actions),
atol=1e-10)
def test_random_game_value_error():
nums_actions = () # empty
assert_raises(ValueError, random_game, nums_actions)
def test_covariance_game_value_error():
nums_actions = () # empty
assert_raises(ValueError, covariance_game, nums_actions, rho=0)
nums_actions = (2,) # length one
assert_raises(ValueError, covariance_game, nums_actions, rho=0)
nums_actions = (2, 3, 4)
rho = 1.1 # > 1
assert_raises(ValueError, covariance_game, nums_actions, rho)
rho = -1 # < -1/(N-1)
assert_raises(ValueError, covariance_game, nums_actions, rho)
def test_random_pure_actions():
nums_actions = (2, 3, 4)
N = len(nums_actions)
seed = 1234
action_profiles = [
random_pure_actions(nums_actions, seed) for i in range(2)
]
for i in range(N):
ok_(action_profiles[0][i] < nums_actions[i])
eq_(action_profiles[0], action_profiles[1])
def test_random_mixed_actions():
nums_actions = (2, 3, 4)
seed = 1234
action_profile = random_mixed_actions(nums_actions, seed)
eq_(tuple([len(action) for action in action_profile]), nums_actions)
if __name__ == '__main__':
import sys
import nose
argv = sys.argv[:]
argv.append('--verbose')
argv.append('--nocapture')
nose.main(argv=argv, defaultTest=__file__)
| oyamad/QuantEcon.py | quantecon/game_theory/tests/test_random.py | Python | bsd-3-clause | 2,411 | 0 |
"""
FREQUENCY FILE
-> Contains functions for analyzing a file based on the frequency of characters or words.
"""
def call(Arguments):
"""
Entry point for all calls pertaining to frequency analysis
"""
# Storing arguments in a dictionary for easier reference
ArgumentsDictionary = {
"NAME" : Arguments[0],
"OPTION" : Arguments[1],
"KEY" : Arguments[2],
"FILE" : Arguments[3],
}
# Storing functions in a dictionary for simplicity
FunctionsDictionary = {
"c" : getCharacterFrequency,
"w" : getWordFrequency,
}
    # Since the first two letters are "-" and "f" respectively
Option = ArgumentsDictionary["OPTION"][2:]
# Call the frequencyWork function to do the actual work
if Option in FunctionsDictionary:
return frequencyWork(FunctionsDictionary[Option], ArgumentsDictionary["FILE"], ArgumentsDictionary["KEY"])
else:
return 0
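# Usage sketch (argument order follows ArgumentsDictionary above; the script name
# and file name are illustrative):
#   call(["analyze.py", "-fc", "e", "corpus.txt"])    # frequency of character 'e'
#   call(["analyze.py", "-fw", "the", "corpus.txt"])  # frequency of word "the"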
def frequencyWork(FunctionObject, FileName, Key):
"""
    Reads the contents of FileName into a list, then calls FunctionObject with that list and the Key.
"""
# Read the enitre file and store it in a variable
try:
with open(FileName, "r") as File:
FileContents = File.read()
except:
print "Couldn't Open File"
return 0
# Split the contents of the file into a list
FileContents = FileContents.split()
# Call FunctionObject to work on the list with respect to the Key
return FunctionObject(FileContents, Key)
def getCharacterFrequency(List, Key):
"""
    Analyses the List to detect occurrences of Key (character)
"""
UpperToLowerOffset = 32 # Difference in ASCII value between Upper-Case and Lower-Case alphabets
    # Analyze occurrences of all characters
if Key == "*all":
# "A" = 65, "Z" = 90
for Number in range(65, 91):
Upper = chr(Number)
Lower = chr(Number + UpperToLowerOffset)
Count = {"UPPER" : 0, "LOWER" : 0}
# We have to check every word in the list
for Word in List:
if Upper in Word or Lower in Word:
# Since they exist in the word, we check every single character
for Character in Word:
if Upper == Character:
Count["UPPER"] += 1
elif Lower == Character:
Count["LOWER"] += 1
# Print Data for this Number
if Count["UPPER"] or Count["LOWER"]:
print "Count of \'%s\': %d" % (Lower, Count["LOWER"])
print "Count of \'%s\': %d" % (Upper, Count["UPPER"])
print
    # Analyze occurrences of KEY
else:
        # This is character analysis, so even if the user passes a longer string, we only use its first character
Key = Key[0]
Count = 0
# Check for every word in the list
for Word in List:
if Key in Word:
# Since Key exists in Word, analyse the characters of Word for Key
for Character in Word:
if Key == Character:
Count += 1
print "Count of \'%s\': %d" % (Key, Count)
print
return 1
def getWordFrequency(List, Key):
"""
    Analyses List to detect occurrences of Key (word)
"""
Count = 0
# Check every word in the list
for Word in List:
        # Remove a trailing period or comma from Word, if any, and then compare with Key
if "." in Word or "," in Word:
Word = Word[:len(Word) - 1]
if Key == Word:
Count += 1
print "Count of \"%s\": %d" % (Key, Count)
print
return 1
""" END - FREQUENCY FILE """ | Yash3667/CipherAnalysis | Analysis/Frequency.py | Python | mit | 3,272 | 0.034535 |
# Copyright (c) 2011 Rackspace
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def capture_exception(body_func, *except_type):
try:
body_func()
return None
except except_type, e:
return e
def capture_type_error(func):
try:
func()
except TypeError, te:
msg = str(te)
if ("takes exactly 1 argument" in msg and "(0 given)" in msg) \
or "instance as first argument (got nothing instead)" in msg:
from proboscis.core import ProboscisTestMethodClassNotDecorated
raise ProboscisTestMethodClassNotDecorated()
else:
raise
| rackerlabs/python-proboscis | proboscis/compatability/exceptions_2_5.py | Python | apache-2.0 | 1,186 | 0.000843 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import easy_thumbnails.fields
import multiselectfield.db.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0035_auto_20141204_1708'),
]
operations = [
migrations.AlterField(
model_name='user',
name='how_found',
field=multiselectfield.db.fields.MultiSelectField(choices=[('internet', 'The Internet'), ('show', 'A presentation, brochure, flyer,... '), ('branch', 'The local branch'), ('member', 'Another member'), ('friends', 'Friends or family'), ('other', 'Other ...')], max_length=41, verbose_name='How did you hear about care4care ?'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='photo',
field=easy_thumbnails.fields.ThumbnailerImageField(upload_to='photos/', default='photos/default_avatar.png'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='offered_job',
field=multiselectfield.db.fields.MultiSelectField(choices=[('1', 'Visit home'), ('2', 'Companionship'), ('3', 'Transport by car'), ('4', 'Shopping'), ('5', 'House sitting'), ('6', 'Manual jobs'), ('7', 'Gardening'), ('8', 'Pet sitting'), ('9', 'Personal care'), ('a', 'Administrative'), ('b', 'Other ...')], max_length=21, verbose_name='What jobs you want to do?', blank=True),
preserve_default=True,
),
]
| MaximeBiset/care4care | main/migrations/0036_auto_20141204_1818.py | Python | agpl-3.0 | 1,581 | 0.001898 |
# -*- coding: cp1252 -*-
# No part of the content of this file was derived from the works of David Giffin.
##
# <p>Copyright © 2005-2008 Stephen John Machin, Lingfo Pty Ltd</p>
# <p>This module is part of the xlrd package, which is released under a BSD-style licence.</p>
#
# <p>Provides function(s) for dealing with Microsoft Excel dates.</p>
##
# 2008-10-18 SJM Fix bug in xldate_from_date_tuple (affected some years after 2099)
# The conversion from days to (year, month, day) starts with
# an integral "julian day number" aka JDN.
# FWIW, JDN 0 corresponds to noon on Monday November 24 in Gregorian year -4713.
# More importantly:
# Noon on Gregorian 1900-03-01 (day 61 in the 1900-based system) is JDN 2415080.0
# Noon on Gregorian 1904-01-02 (day 1 in the 1904-based system) is JDN 2416482.0
import datetime
_JDN_delta = (2415080 - 61, 2416482 - 1)
assert _JDN_delta[1] - _JDN_delta[0] == 1462
# Pre-calculate the datetime epochs for efficiency.
epoch_1904 = datetime.datetime(1904, 1, 1)
epoch_1900 = datetime.datetime(1899, 12, 31)
epoch_1900_minus_1 = datetime.datetime(1899, 12, 30)
class XLDateError(ValueError): pass
class XLDateNegative(XLDateError): pass
class XLDateAmbiguous(XLDateError): pass
class XLDateTooLarge(XLDateError): pass
class XLDateBadDatemode(XLDateError): pass
class XLDateBadTuple(XLDateError): pass
_XLDAYS_TOO_LARGE = (2958466, 2958466 - 1462) # This is equivalent to 10000-01-01
##
# Convert an Excel number (presumed to represent a date, a datetime or a time) into
# a tuple suitable for feeding to datetime or mx.DateTime constructors.
# @param xldate The Excel number
# @param datemode 0: 1900-based, 1: 1904-based.
# <br>WARNING: when using this function to
# interpret the contents of a workbook, you should pass in the Book.datemode
# attribute of that workbook. Whether
# the workbook has ever been anywhere near a Macintosh is irrelevant.
# @return Gregorian (year, month, day, hour, minute, nearest_second).
# <br>Special case: if 0.0 <= xldate < 1.0, it is assumed to represent a time;
# (0, 0, 0, hour, minute, second) will be returned.
# <br>Note: 1904-01-01 is not regarded as a valid date in the datemode 1 system; its "serial number"
# is zero.
# @throws XLDateNegative xldate < 0.00
# @throws XLDateAmbiguous The 1900 leap-year problem (datemode == 0 and 1.0 <= xldate < 61.0)
# @throws XLDateTooLarge Gregorian year 10000 or later
# @throws XLDateBadDatemode datemode arg is neither 0 nor 1
# @throws XLDateError Covers the 4 specific errors
def xldate_as_tuple(xldate, datemode):
if datemode not in (0, 1):
raise XLDateBadDatemode(datemode)
if xldate == 0.00:
return (0, 0, 0, 0, 0, 0)
if xldate < 0.00:
raise XLDateNegative(xldate)
xldays = int(xldate)
frac = xldate - xldays
seconds = int(round(frac * 86400.0))
assert 0 <= seconds <= 86400
if seconds == 86400:
hour = minute = second = 0
xldays += 1
else:
# second = seconds % 60; minutes = seconds // 60
minutes, second = divmod(seconds, 60)
# minute = minutes % 60; hour = minutes // 60
hour, minute = divmod(minutes, 60)
if xldays >= _XLDAYS_TOO_LARGE[datemode]:
raise XLDateTooLarge(xldate)
if xldays == 0:
return (0, 0, 0, hour, minute, second)
if xldays < 61 and datemode == 0:
raise XLDateAmbiguous(xldate)
jdn = xldays + _JDN_delta[datemode]
yreg = ((((jdn * 4 + 274277) // 146097) * 3 // 4) + jdn + 1363) * 4 + 3
mp = ((yreg % 1461) // 4) * 535 + 333
d = ((mp % 16384) // 535) + 1
# mp /= 16384
mp >>= 14
if mp >= 10:
return ((yreg // 1461) - 4715, mp - 9, d, hour, minute, second)
else:
return ((yreg // 1461) - 4716, mp + 3, d, hour, minute, second)
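# Example (1900 date system): day 61 is 1900-03-01, per the JDN note above.
#   xldate_as_tuple(61.0, 0) -> (1900, 3, 1, 0, 0, 0)
#   xldate_as_tuple(61.5, 0) -> (1900, 3, 1, 12, 0, 0)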
##
# Convert an Excel date/time number into a datetime.datetime object.
#
# @param xldate The Excel number
# @param datemode 0: 1900-based, 1: 1904-based.
#
# @return a datetime.datetime() object.
#
def xldate_as_datetime(xldate, datemode):
"""Convert an Excel date/time number into a datetime.datetime object."""
# Set the epoch based on the 1900/1904 datemode.
if datemode:
epoch = epoch_1904
else:
if xldate < 60:
epoch = epoch_1900
else:
# Workaround Excel 1900 leap year bug by adjusting the epoch.
epoch = epoch_1900_minus_1
# The integer part of the Excel date stores the number of days since
# the epoch and the fractional part stores the percentage of the day.
days = int(xldate)
fraction = xldate - days
    # Get the integer and decimal seconds in Excel's millisecond resolution.
seconds = int(round(fraction * 86400000.0))
seconds, milliseconds = divmod(seconds, 1000)
return epoch + datetime.timedelta(days, seconds, 0, milliseconds)
# === conversions from date/time to xl numbers
def _leap(y):
if y % 4: return 0
if y % 100: return 1
if y % 400: return 0
return 1
_days_in_month = (None, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
##
# Convert a date tuple (year, month, day) to an Excel date.
# @param year Gregorian year.
# @param month 1 <= month <= 12
# @param day 1 <= day <= last day of that (year, month)
# @param datemode 0: 1900-based, 1: 1904-based.
# @throws XLDateAmbiguous The 1900 leap-year problem (datemode == 0 and 1.0 <= xldate < 61.0)
# @throws XLDateBadDatemode datemode arg is neither 0 nor 1
# @throws XLDateBadTuple (year, month, day) is too early/late or has invalid component(s)
# @throws XLDateError Covers the specific errors
def xldate_from_date_tuple(date_tuple, datemode):
"""Create an excel date from a tuple of (year, month, day)"""
year, month, day = date_tuple
if datemode not in (0, 1):
raise XLDateBadDatemode(datemode)
if year == 0 and month == 0 and day == 0:
return 0.00
if not (1900 <= year <= 9999):
raise XLDateBadTuple("Invalid year: %r" % ((year, month, day),))
if not (1 <= month <= 12):
raise XLDateBadTuple("Invalid month: %r" % ((year, month, day),))
if day < 1 \
or (day > _days_in_month[month] and not(day == 29 and month == 2 and _leap(year))):
raise XLDateBadTuple("Invalid day: %r" % ((year, month, day),))
Yp = year + 4716
M = month
if M <= 2:
Yp = Yp - 1
Mp = M + 9
else:
Mp = M - 3
jdn = (1461 * Yp // 4) + ((979 * Mp + 16) // 32) + \
day - 1364 - (((Yp + 184) // 100) * 3 // 4)
xldays = jdn - _JDN_delta[datemode]
if xldays <= 0:
raise XLDateBadTuple("Invalid (year, month, day): %r" % ((year, month, day),))
if xldays < 61 and datemode == 0:
raise XLDateAmbiguous("Before 1900-03-01: %r" % ((year, month, day),))
return float(xldays)
##
# Convert a time tuple (hour, minute, second) to an Excel "date" value (fraction of a day).
# @param hour 0 <= hour < 24
# @param minute 0 <= minute < 60
# @param second 0 <= second < 60
# @throws XLDateBadTuple Out-of-range hour, minute, or second
def xldate_from_time_tuple(time_tuple):
"""Create an excel date from a tuple of (hour, minute, second)"""
hour, minute, second = time_tuple
if 0 <= hour < 24 and 0 <= minute < 60 and 0 <= second < 60:
return ((second / 60.0 + minute) / 60.0 + hour) / 24.0
raise XLDateBadTuple("Invalid (hour, minute, second): %r" % ((hour, minute, second),))
##
# Convert a datetime tuple (year, month, day, hour, minute, second) to an Excel date value.
# For more details, refer to other xldate_from_*_tuple functions.
# @param datetime_tuple (year, month, day, hour, minute, second)
# @param datemode 0: 1900-based, 1: 1904-based.
def xldate_from_datetime_tuple(datetime_tuple, datemode):
return (
xldate_from_date_tuple(datetime_tuple[:3], datemode)
+
xldate_from_time_tuple(datetime_tuple[3:])
)
| qianqians/meter | xlrd/xldate.py | Python | lgpl-2.1 | 7,895 | 0.004813 |
##############################################################################
#
# Copyright (C) 2015 Comunitea Servicios Tecnológicos All Rights Reserved
# $Omar Castiñeira Saavedra <[email protected]>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Partner custom",
'version': '1.0',
'category': 'Custom',
'description': """Several little customizations in partners""",
'author': 'Comunitea Servicios Tecnológicos',
'website': 'www.comunitea.com',
"depends": ['base', 'sale', 'l10n_es_partner', 'account',
'base_partner_sequence', 'stock', 'account_credit_control',
'purchase', 'prospective_customer', 'account_due_list',
'customer_lost', 'sale_margin_percentage', 'contacts',
'crm_phone_validation', 'commercial_rules', 'account_fiscal_position_partner_type'],
"data": ["views/invoice_pending_sales_view.xml",
"views/partner_view.xml",
"views/sale_view.xml",
"security/ir.model.access.csv",
"data/custom_partner_data.xml",
"security/groups.xml",
"data/parameters.xml"],
"installable": True
}
| Comunitea/CMNT_004_15 | project-addons/custom_partner/__manifest__.py | Python | agpl-3.0 | 1,917 | 0.000522 |
from edc_base.model.models import BaseListModel
class InfantVaccines (BaseListModel):
class Meta:
app_label = 'mb_list'
verbose_name = "Infant Vaccines"
verbose_name_plural = "Infant Vaccines"
| botswana-harvard/microbiome | microbiome/apps/mb_list/models/infant_vaccines.py | Python | gpl-2.0 | 224 | 0 |
"""Support for Tile device trackers."""
import logging
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.components.device_tracker.const import SOURCE_TYPE_GPS
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import DATA_COORDINATOR, DATA_TILE, DOMAIN
_LOGGER = logging.getLogger(__name__)
ATTR_ALTITUDE = "altitude"
ATTR_CONNECTION_STATE = "connection_state"
ATTR_IS_DEAD = "is_dead"
ATTR_IS_LOST = "is_lost"
ATTR_LAST_LOST_TIMESTAMP = "last_lost_timestamp"
ATTR_RING_STATE = "ring_state"
ATTR_TILE_NAME = "tile_name"
ATTR_VOIP_STATE = "voip_state"
DEFAULT_ATTRIBUTION = "Data provided by Tile"
DEFAULT_ICON = "mdi:view-grid"
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Tile device trackers."""
async_add_entities(
[
TileDeviceTracker(
hass.data[DOMAIN][DATA_COORDINATOR][entry.entry_id][tile_uuid], tile
)
for tile_uuid, tile in hass.data[DOMAIN][DATA_TILE][entry.entry_id].items()
]
)
async def async_setup_scanner(hass, config, async_see, discovery_info=None):
"""Detect a legacy configuration and import it."""
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={
CONF_USERNAME: config[CONF_USERNAME],
CONF_PASSWORD: config[CONF_PASSWORD],
},
)
)
_LOGGER.info(
"Your Tile configuration has been imported into the UI; "
"please remove it from configuration.yaml"
)
return True
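# Sketch of the legacy YAML that async_setup_scanner imports; the values are
# placeholders, not a definitive example:
#
#   device_tracker:
#     - platform: tile
#       username: [email protected]
#       password: !secret tile_password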
class TileDeviceTracker(CoordinatorEntity, TrackerEntity):
"""Representation of a network infrastructure device."""
def __init__(self, coordinator, tile):
"""Initialize."""
super().__init__(coordinator)
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._tile = tile
@property
def available(self):
"""Return if entity is available."""
return self.coordinator.last_update_success and not self._tile.dead
@property
def battery_level(self):
"""Return the battery level of the device.
Percentage from 0-100.
"""
return None
@property
def extra_state_attributes(self):
"""Return the device state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
return DEFAULT_ICON
@property
def location_accuracy(self):
"""Return the location accuracy of the device.
Value in meters.
"""
return self._tile.accuracy
@property
def latitude(self) -> float:
"""Return latitude value of the device."""
return self._tile.latitude
@property
def longitude(self) -> float:
"""Return longitude value of the device."""
return self._tile.longitude
@property
def name(self):
"""Return the name."""
return self._tile.name
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return f"tile_{self._tile.uuid}"
@property
def source_type(self):
"""Return the source type, eg gps or router, of the device."""
return SOURCE_TYPE_GPS
@callback
def _handle_coordinator_update(self):
"""Respond to a DataUpdateCoordinator update."""
self._update_from_latest_data()
self.async_write_ha_state()
@callback
def _update_from_latest_data(self):
"""Update the entity from the latest data."""
self._attrs.update(
{
ATTR_ALTITUDE: self._tile.altitude,
ATTR_IS_LOST: self._tile.lost,
ATTR_LAST_LOST_TIMESTAMP: self._tile.lost_timestamp,
ATTR_RING_STATE: self._tile.ring_state,
ATTR_VOIP_STATE: self._tile.voip_state,
}
)
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
self._update_from_latest_data()
| w1ll1am23/home-assistant | homeassistant/components/tile/device_tracker.py | Python | apache-2.0 | 4,346 | 0.00046 |
# Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import subprocess
import jsonschema
import mock
from rally import consts
from rally.plugins.common.hook import sys_call
from rally.task import hook
from tests.unit import fakes
from tests.unit import test
class SysCallHookTestCase(test.TestCase):
def test_validate(self):
hook.Hook.validate(
{
"name": "sys_call",
"description": "list folder",
"args": "ls",
"trigger": {
"name": "event",
"args": {
"unit": "iteration",
"at": [10]
}
}
}
)
def test_validate_error(self):
conf = {
"name": "sys_call",
"description": "list folder",
"args": {
"cmd": 50,
},
"trigger": {
"name": "event",
"args": {
"unit": "iteration",
"at": [10]
}
}
}
self.assertRaises(
jsonschema.ValidationError, hook.Hook.validate, conf)
@mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer)
@mock.patch("subprocess.Popen")
def test_run(self, mock_popen, mock_timer):
popen_instance = mock_popen.return_value
popen_instance.returncode = 0
task = mock.MagicMock()
sys_call_hook = sys_call.SysCallHook(task, "/bin/bash -c 'ls'",
{"iteration": 1}, "dummy_action")
sys_call_hook.run_sync()
sys_call_hook.validate_result_schema()
self.assertEqual(
{
"hook": "sys_call",
"description": "dummy_action",
"triggered_by": {"iteration": 1},
"started_at": fakes.FakeTimer().timestamp(),
"finished_at": fakes.FakeTimer().finish_timestamp(),
"status": consts.HookStatus.SUCCESS,
}, sys_call_hook.result())
mock_popen.assert_called_once_with(
["/bin/bash", "-c", "ls"],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
@mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer)
@mock.patch("subprocess.Popen")
def test_run_error(self, mock_popen, mock_timer):
popen_instance = mock_popen.return_value
popen_instance.returncode = 1
popen_instance.stdout.read.return_value = b"No such file or directory"
task = mock.MagicMock()
sys_call_hook = sys_call.SysCallHook(task, "/bin/bash -c 'ls'",
{"iteration": 1}, "dummy_action")
sys_call_hook.run_sync()
sys_call_hook.validate_result_schema()
self.assertEqual(
{
"hook": "sys_call",
"description": "dummy_action",
"triggered_by": {"iteration": 1},
"started_at": fakes.FakeTimer().timestamp(),
"finished_at": fakes.FakeTimer().finish_timestamp(),
"status": consts.HookStatus.FAILED,
"error": [
"n/a",
"Subprocess returned 1",
"No such file or directory",
]
}, sys_call_hook.result())
mock_popen.assert_called_once_with(
["/bin/bash", "-c", "ls"],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
| vganapath/rally | tests/unit/plugins/common/hook/test_sys_call.py | Python | apache-2.0 | 4,165 | 0 |
__author__ = 'DownGoat'
from pyfeedreader import database
database.init_db()
| DownGoat/PyFeedReader | db_init.py | Python | gpl-3.0 | 79 | 0 |
import pytest
from munerator.context import GameContext
from mock import Mock
@pytest.fixture
def gc():
gc = GameContext(Mock(), Mock(), Mock())
return gc
def test_player_name_client_id_translation(gc):
client_id = '1'
player_name = 'testplayer'
gc.clients = {
client_id: {
'name': player_name,
'client_id': client_id
}
}
data = {
'kind': 'say',
'player_name': player_name
}
contexted_data = gc.handle_event(data)
assert contexted_data['client_id'] == client_id
| aequitas/munerator | tests/test_context.py | Python | mit | 566 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import uuid
from keystoneauth1 import exceptions
from keystoneauth1 import loading
from keystoneauth1.tests.unit.loading import utils
class V3PasswordTests(utils.TestCase):
def setUp(self):
super(V3PasswordTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3password')
return loader.load_from_options(**kwargs)
def test_basic(self):
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
password = uuid.uuid4().hex
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
p = self.create(username=username,
user_domain_id=user_domain_id,
project_name=project_name,
project_domain_id=project_domain_id,
password=password)
pw_method = p.auth_methods[0]
self.assertEqual(username, pw_method.username)
self.assertEqual(user_domain_id, pw_method.user_domain_id)
self.assertEqual(password, pw_method.password)
self.assertEqual(project_name, p.project_name)
self.assertEqual(project_domain_id, p.project_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
password=uuid.uuid4().hex)
def test_without_project_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
password=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
project_name=uuid.uuid4().hex)
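# For context, a minimal (non-test) sketch of the same loader; every value is a
# placeholder and the endpoint is assumed, not a real deployment:
#
#   loader = loading.get_plugin_loader('v3password')
#   auth = loader.load_from_options(auth_url='https://keystone.example.com/v3',
#                                   username='demo', password='secret',
#                                   user_domain_id='default',
#                                   project_name='demo',
#                                   project_domain_id='default')
#   # The resulting plugin is normally passed to keystoneauth1.session.Session.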
class TOTPTests(utils.TestCase):
def setUp(self):
super(TOTPTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3totp')
return loader.load_from_options(**kwargs)
def test_basic(self):
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
# passcode is 6 digits
passcode = ''.join(str(random.randint(0, 9)) for x in range(6))
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
p = self.create(username=username,
user_domain_id=user_domain_id,
project_name=project_name,
project_domain_id=project_domain_id,
passcode=passcode)
totp_method = p.auth_methods[0]
self.assertEqual(username, totp_method.username)
self.assertEqual(user_domain_id, totp_method.user_domain_id)
self.assertEqual(passcode, totp_method.passcode)
self.assertEqual(project_name, p.project_name)
self.assertEqual(project_domain_id, p.project_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex)
def test_without_project_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
passcode=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
project_name=uuid.uuid4().hex)
class OpenIDConnectBaseTests(object):
plugin_name = None
def setUp(self):
super(OpenIDConnectBaseTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_base_options_are_there(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['client-id', 'client-secret', 'access-token-endpoint',
'access-token-type', 'openid-scope',
'discovery-endpoint']).issubset(
set([o.name for o in options]))
)
# openid-scope gets renamed into "scope"
self.assertIn('scope', [o.dest for o in options])
class OpenIDConnectClientCredentialsTests(OpenIDConnectBaseTests,
utils.TestCase):
plugin_name = "v3oidcclientcredentials"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectPasswordTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcpassword"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['username', 'password', 'openid-scope']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
username = uuid.uuid4().hex
password = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
scope = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(username=username,
password=password,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(username, oidc.username)
self.assertEqual(password, oidc.password)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAuthCodeTests(OpenIDConnectBaseTests, utils.TestCase):
plugin_name = "v3oidcauthcode"
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['redirect-uri', 'code']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token_endpoint = uuid.uuid4().hex
redirect_uri = uuid.uuid4().hex
authorization_code = uuid.uuid4().hex
scope = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
client_id = uuid.uuid4().hex
client_secret = uuid.uuid4().hex
oidc = self.create(code=authorization_code,
redirect_uri=redirect_uri,
identity_provider=identity_provider,
protocol=protocol,
access_token_endpoint=access_token_endpoint,
client_id=client_id,
client_secret=client_secret,
scope=scope)
self.assertEqual(redirect_uri, oidc.redirect_uri)
self.assertEqual(authorization_code, oidc.code)
self.assertEqual(scope, oidc.scope)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token_endpoint, oidc.access_token_endpoint)
self.assertEqual(client_id, oidc.client_id)
self.assertEqual(client_secret, oidc.client_secret)
class OpenIDConnectAccessToken(utils.TestCase):
plugin_name = "v3oidcaccesstoken"
def setUp(self):
super(OpenIDConnectAccessToken, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader(self.plugin_name)
return loader.load_from_options(**kwargs)
def test_options(self):
options = loading.get_plugin_loader(self.plugin_name).get_options()
self.assertTrue(
set(['access-token']).issubset(
set([o.name for o in options]))
)
def test_basic(self):
access_token = uuid.uuid4().hex
identity_provider = uuid.uuid4().hex
protocol = uuid.uuid4().hex
oidc = self.create(access_token=access_token,
identity_provider=identity_provider,
protocol=protocol)
self.assertEqual(identity_provider, oidc.identity_provider)
self.assertEqual(protocol, oidc.protocol)
self.assertEqual(access_token, oidc.access_token)
class V3TokenlessAuthTests(utils.TestCase):
def setUp(self):
super(V3TokenlessAuthTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3tokenlessauth')
return loader.load_from_options(**kwargs)
def test_basic(self):
domain_id = uuid.uuid4().hex
domain_name = uuid.uuid4().hex
project_id = uuid.uuid4().hex
project_name = uuid.uuid4().hex
project_domain_id = uuid.uuid4().hex
project_domain_name = uuid.uuid4().hex
tla = self.create(domain_id=domain_id,
domain_name=domain_name,
project_id=project_id,
project_name=project_name,
project_domain_id=project_domain_id,
project_domain_name=project_domain_name)
self.assertEqual(domain_id, tla.domain_id)
self.assertEqual(domain_name, tla.domain_name)
self.assertEqual(project_id, tla.project_id)
self.assertEqual(project_name, tla.project_name)
self.assertEqual(project_domain_id, tla.project_domain_id)
self.assertEqual(project_domain_name, tla.project_domain_name)
def test_missing_parameters(self):
self.assertRaises(exceptions.OptionError,
self.create,
domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
domain_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=None)
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=None)
# only when a project_name is provided, project_domain_id will
        # be used to uniquely identify the project. It's an invalid
# option when it's just by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_id=uuid.uuid4().hex)
# only when a project_name is provided, project_domain_name will
        # be used to uniquely identify the project. It's an invalid
# option when it's just by itself.
self.assertRaises(exceptions.OptionError,
self.create,
project_domain_name=uuid.uuid4().hex)
self.assertRaises(exceptions.OptionError,
self.create,
project_name=uuid.uuid4().hex)
class V3ApplicationCredentialTests(utils.TestCase):
def setUp(self):
super(V3ApplicationCredentialTests, self).setUp()
self.auth_url = uuid.uuid4().hex
def create(self, **kwargs):
kwargs.setdefault('auth_url', self.auth_url)
loader = loading.get_plugin_loader('v3applicationcredential')
return loader.load_from_options(**kwargs)
def test_basic(self):
id = uuid.uuid4().hex
secret = uuid.uuid4().hex
app_cred = self.create(application_credential_id=id,
application_credential_secret=secret)
ac_method = app_cred.auth_methods[0]
self.assertEqual(id, ac_method.application_credential_id)
self.assertEqual(secret, ac_method.application_credential_secret)
def test_with_name(self):
name = uuid.uuid4().hex
secret = uuid.uuid4().hex
username = uuid.uuid4().hex
user_domain_id = uuid.uuid4().hex
app_cred = self.create(application_credential_name=name,
application_credential_secret=secret,
username=username,
user_domain_id=user_domain_id)
ac_method = app_cred.auth_methods[0]
self.assertEqual(name, ac_method.application_credential_name)
self.assertEqual(secret, ac_method.application_credential_secret)
self.assertEqual(username, ac_method.username)
self.assertEqual(user_domain_id, ac_method.user_domain_id)
def test_without_user_domain(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_name=uuid.uuid4().hex,
username=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_name_or_id(self):
self.assertRaises(exceptions.OptionError,
self.create,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex,
application_credential_secret=uuid.uuid4().hex)
def test_without_secret(self):
self.assertRaises(exceptions.OptionError,
self.create,
application_credential_id=uuid.uuid4().hex,
username=uuid.uuid4().hex,
user_domain_id=uuid.uuid4().hex)
| ctrlaltdel/neutrinator | vendor/keystoneauth1/tests/unit/loading/test_v3.py | Python | gpl-3.0 | 16,308 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# generated by wxGlade 0.6.3 on Fri Dec 16 03:13:38 2011
import wx, wx.richtext, wx.grid, wx.lib.intctrl
import sys, os, re
import logging
# Climb the tree to find out where we are
p = os.path.abspath(__file__)
t = ""
while t != "src":
(p, t) = os.path.split(p)
if p == "":
print "I have no idea where I am; this is ridiculous"
sys.exit(1)
sys.path.append(os.path.join(p,"src","lib"))
import project
from copy import deepcopy
from numpy import *
import subprocess
import socket
import handlerSubsystem
from hsubParsingUtils import parseCallString
import lib.handlers.handlerTemplates as ht
import lib.globalConfig
from lib.hsubConfigObjects import ExperimentConfig, RobotConfig
# begin wxGlade: extracode
# end wxGlade
CALIB_PORT = 23460
def drawParamConfigPane(target, method, proj):
if target.GetSizer() is not None:
target.GetSizer().Clear(deleteWindows=True)
list_sizer = wx.BoxSizer(wx.VERTICAL)
label_info = wx.StaticText(target, -1, method.comment)
label_info.SetFont(wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
static_line = wx.StaticLine(target, -1)
list_sizer.Add(label_info, 0, wx.ALL|wx.EXPAND, 5)
list_sizer.Add(static_line, 0, wx.EXPAND, 0)
param_controls = {}
for p in method.para:
#print "name: %s, para_type: %s, default: %s, value: %s" % (p.name, p.para_type, p.default, p.value)
item_sizer = wx.BoxSizer(wx.HORIZONTAL)
param_label = wx.StaticText(target, -1, "%s:" % p.name)
if p.para_type is None:
continue
if p.para_type.lower() == "region":
r_names = [r.name for r in proj.rfi.regions if r.name.lower() != "boundary" and not r.isObstacle]
param_controls[p] = wx.ComboBox(target, -1, choices=r_names, style=wx.CB_DROPDOWN)
if p.value is not None and p.value in r_names:
param_controls[p].SetStringSelection(p.value)
            elif p.default is not None and p.default in r_names:
p.value = p.default
param_controls[p].SetStringSelection(p.default)
else:
p.value = r_names[0]
param_controls[p].SetSelection(0)
elif p.para_type.lower().startswith("bool"):
param_controls[p] = wx.CheckBox(target, -1, "")
if p.value is not None:
param_controls[p].SetValue(p.value)
elif p.default is not None:
p.value = p.default
param_controls[p].SetValue(p.default)
else:
p.value = "False"
param_controls[p].SetValue(False)
elif p.para_type.lower().startswith("int"):
param_controls[p] = wx.lib.intctrl.IntCtrl(target, -1, 0)
if p.min_val is not None:
param_controls[p].SetMin(p.min_val)
param_controls[p].SetLimited(True)
if p.max_val is not None:
param_controls[p].SetMax(p.max_val)
param_controls[p].SetLimited(True)
if p.value is not None:
param_controls[p].SetValue(p.value)
elif p.default is not None:
p.value = p.default
param_controls[p].SetValue(p.default)
else:
p.value = "0"
param_controls[p].SetValue(0)
else:
if p.value is not None:
param_controls[p] = wx.TextCtrl(target, -1, str(p.value))
elif p.default is not None:
p.value = p.default
param_controls[p] = wx.TextCtrl(target, -1, str(p.default))
else:
p.value = ""
param_controls[p] = wx.TextCtrl(target, -1, "")
param_label.SetToolTip(wx.ToolTip(p.desc))
item_sizer = wx.BoxSizer(wx.HORIZONTAL)
item_sizer.Add(param_label, 0, wx.ALL, 5)
item_sizer.Add(param_controls[p], 1, wx.ALL, 5)
list_sizer.Add(item_sizer, 0, wx.EXPAND, 0)
# TODO: is there a better way to do this?
def paramPaneCallback(event):
this_param = None
for p in method.para:
if event.GetEventObject() is param_controls[p]:
this_param = p
break
if this_param is None:
# Ignore; from another control (e.g. calib matrix)
return
this_param.setValue(param_controls[this_param].GetValue())
target.Bind(wx.EVT_TEXT, paramPaneCallback)
target.Bind(wx.EVT_COMBOBOX, paramPaneCallback)
target.Bind(wx.EVT_CHECKBOX, paramPaneCallback)
target.Bind(wx.lib.intctrl.EVT_INT, paramPaneCallback)
target.SetSizer(list_sizer)
target.Layout()
label_info.Wrap(list_sizer.GetSize()[0])
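# For reference: drawParamConfigPane only relies on these attributes of each
# entry in method.para, as used above: name, para_type, default, value,
# min_val, max_val, desc and the setValue() method; anything else defined on
# the parameter objects is not inspected here.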
class regionTagsDialog(wx.Dialog):
def __init__(self, parent, *args, **kwds):
# begin wxGlade: regionTagsDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE
wx.Dialog.__init__(self, *args, **kwds)
self.label_5 = wx.StaticText(self, wx.ID_ANY, "Tags:")
self.list_box_tags = wx.ListBox(self, wx.ID_ANY, choices=[], style=wx.LB_SINGLE)
self.button_add_tag = wx.Button(self, wx.ID_ADD, "")
self.button_remove_tag = wx.Button(self, wx.ID_REMOVE, "")
self.label_12 = wx.StaticText(self, wx.ID_ANY, "Regions:")
self.list_box_regions = wx.CheckListBox(self, wx.ID_ANY, choices=[])
self.static_line_2 = wx.StaticLine(self, wx.ID_ANY)
self.button_5 = wx.Button(self, wx.ID_OK, "")
self.button_8 = wx.Button(self, wx.ID_CANCEL, "")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_LISTBOX, self.onClickTag, self.list_box_tags)
self.Bind(wx.EVT_BUTTON, self.onClickAddTag, self.button_add_tag)
self.Bind(wx.EVT_BUTTON, self.onClickRemoveTag, self.button_remove_tag)
# end wxGlade
self.proj = parent.proj
self.Bind(wx.EVT_CHECKLISTBOX, self.onCheckRegion, self.list_box_regions)
def __set_properties(self):
# begin wxGlade: regionTagsDialog.__set_properties
self.SetTitle("Edit Region Tags...")
self.SetSize((577, 419))
# end wxGlade
def __do_layout(self):
# begin wxGlade: regionTagsDialog.__do_layout
sizer_31 = wx.BoxSizer(wx.VERTICAL)
sizer_34 = wx.BoxSizer(wx.HORIZONTAL)
sizer_32 = wx.BoxSizer(wx.HORIZONTAL)
sizer_35 = wx.BoxSizer(wx.VERTICAL)
sizer_33 = wx.BoxSizer(wx.VERTICAL)
sizer_36 = wx.BoxSizer(wx.HORIZONTAL)
sizer_33.Add(self.label_5, 0, 0, 0)
sizer_33.Add(self.list_box_tags, 1, wx.TOP | wx.BOTTOM | wx.EXPAND, 5)
sizer_36.Add(self.button_add_tag, 0, 0, 0)
sizer_36.Add(self.button_remove_tag, 0, wx.LEFT, 10)
sizer_33.Add(sizer_36, 0, wx.EXPAND, 0)
sizer_32.Add(sizer_33, 1, wx.RIGHT | wx.EXPAND, 5)
sizer_35.Add(self.label_12, 0, 0, 0)
sizer_35.Add(self.list_box_regions, 1, wx.TOP | wx.EXPAND, 5)
sizer_32.Add(sizer_35, 1, wx.EXPAND, 0)
sizer_31.Add(sizer_32, 1, wx.ALL | wx.EXPAND, 5)
sizer_31.Add(self.static_line_2, 0, wx.EXPAND, 0)
sizer_34.Add((20, 20), 1, wx.EXPAND, 0)
sizer_34.Add(self.button_5, 0, wx.RIGHT, 10)
sizer_34.Add(self.button_8, 0, 0, 0)
sizer_31.Add(sizer_34, 0, wx.ALL | wx.EXPAND, 10)
self.SetSizer(sizer_31)
self.Layout()
# end wxGlade
def _tags2dialog(self, tags):
self.tags = tags
# Populate tags and regions
self.list_box_tags.Set(self.tags.keys())
if self.list_box_tags.GetCount() > 0:
self.list_box_tags.SetSelection(0)
self.button_remove_tag.Enable(True)
self.onClickTag(None)
else:
self.button_remove_tag.Enable(False)
def onCheckRegion(self, event):
tag = self.list_box_tags.GetStringSelection()
self.tags[tag] = self.list_box_regions.GetCheckedStrings()
event.Skip()
def onClickTag(self, event): # wxGlade: regionTagsDialog.<event_handler>
if event is not None:
tag = event.GetString()
else:
tag = self.list_box_tags.GetStringSelection()
if tag == '':
self.list_box_regions.Set([])
return
self.list_box_regions.Set([r.name for r in self.proj.rfi.regions if r.name.lower() != "boundary" and not r.isObstacle])
for i, rname in enumerate(self.list_box_regions.GetItems()):
self.list_box_regions.Check(i, rname in self.tags[tag])
if event is not None:
event.Skip()
def onClickAddTag(self, event): # wxGlade: regionTagsDialog.<event_handler>
# Ask the user for a tag name
name = wx.GetTextFromUser("Name:", "New Tag")
if name != "":
if name in self.tags:
wx.MessageBox("Tag with that name already exists.", "Invalid tag name",
style = wx.OK | wx.ICON_ERROR)
return
# If it's valid, add it, select it and enable it
self.list_box_tags.Insert(name, self.list_box_tags.GetCount())
self.list_box_tags.Select(self.list_box_tags.GetCount()-1)
self.tags[name] = []
self.onClickTag(None)
self.button_remove_tag.Enable(True)
event.Skip()
def onClickRemoveTag(self, event): # wxGlade: regionTagsDialog.<event_handler>
numel = self.list_box_tags.GetCount()
if numel > 0:
pos = self.list_box_tags.GetSelection()
tag = self.list_box_tags.GetStringSelection()
self.list_box_tags.Delete(pos)
del self.tags[tag]
if pos == numel - 1:
# If the very last element was deleted, move the selection up one
newpos = pos - 1
else:
newpos = pos
if newpos != -1:
self.list_box_tags.Select(newpos)
else:
self.button_remove_tag.Enable(False)
self.onClickTag(None)
event.Skip()
# end of class regionTagsDialog
class handlerConfigDialog(wx.Dialog):
def __init__(self, parent, *args, **kwds):
# begin wxGlade: handlerConfigDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE
wx.Dialog.__init__(self, *args, **kwds)
self.panel_configs = wx.ScrolledWindow(self, wx.ID_ANY, style=wx.SUNKEN_BORDER | wx.TAB_TRAVERSAL)
self.button_defaults = wx.Button(self, wx.ID_ANY, "Reset to Defaults")
self.button_OK = wx.Button(self, wx.ID_OK, "")
self.button_1 = wx.Button(self, wx.ID_CANCEL, "")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_BUTTON, self.onClickDefaults, self.button_defaults)
# end wxGlade
self.hsub = parent.hsub
self.proj = parent.proj
self.robot = parent.robot
def __set_properties(self):
# begin wxGlade: handlerConfigDialog.__set_properties
self.SetTitle("Configure XXXhandler")
self.panel_configs.SetScrollRate(10, 10)
self.button_OK.SetDefault()
# end wxGlade
def __do_layout(self):
# begin wxGlade: handlerConfigDialog.__do_layout
sizer_10 = wx.BoxSizer(wx.VERTICAL)
sizer_26 = wx.BoxSizer(wx.HORIZONTAL)
sizer_10.Add(self.panel_configs, 1, wx.EXPAND, 0)
sizer_26.Add(self.button_defaults, 0, wx.ALL, 5)
sizer_26.Add((20, 20), 1, 0, 0)
sizer_26.Add(self.button_OK, 0, wx.ALL, 5)
sizer_26.Add(self.button_1, 0, wx.ALL, 5)
sizer_10.Add(sizer_26, 0, wx.EXPAND, 0)
self.SetSizer(sizer_10)
sizer_10.Fit(self)
self.Layout()
# end wxGlade
def _onCalibEdit(self, event):
r = event.GetRow()
c = event.GetCol()
self.robot.calibration_matrix[r,c] = self.sheet.GetCellValue(r,c)
event.Skip()
def _onClickCalibrate(self, event):
event.Skip()
# Check that a region file is associated
if self.proj.rfi is None:
wx.MessageBox("Please define regions before calibrating.", "Error",
style = wx.OK | wx.ICON_ERROR)
return
# Check that an init handler is selected
if ht.InitHandler not in self.robot.handlers.keys():
wx.MessageBox("Please choose an Initialization Handler before calibrating.", "Error",
style = wx.OK | wx.ICON_ERROR)
return
# Create a copy of the project in its current state
proj_copy = deepcopy(self.proj)
# Create a temp config with one robot, with
# the currently selected init and pose handlers
cfg = ExperimentConfig()
robot = deepcopy(self.robot)
cfg.name = 'calibrate'
cfg.file_name = os.path.join(proj_copy.project_root, 'configs', 'calibrate.config')
cfg.complete = True
robot.name = "calibrate"
robot.handlers[ht.PoseHandler] = self.handler
# If the inithandler takes an init_region argument (i.e. playerstage, ODE), set it to the origin
try:
p = robot.handlers[ht.InitHandler].getMethodByName("__init__").getParaByName("init_region")
except ValueError:
pass
else:
p.setValue("__origin__")
cfg.main_robot = robot.name
cfg.robots.append(robot)
proj_copy.current_config = cfg.name
proj_copy.writeSpecFile(proj_copy.getFilenamePrefix()+".spec_calibtmp")
cfg.saveConfig()
print "Running calibration tool..."
proc = subprocess.Popen(["python", "-u", "-m", "lib.calibrate", proj_copy.getFilenamePrefix() + ".spec_calibtmp", str(CALIB_PORT)])
# Listen on socket for return value
host = 'localhost'
buf = 1024
addr = (host, CALIB_PORT)
UDPSock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
UDPSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
UDPSock.settimeout(0.1)
UDPSock.bind(addr)
while proc.returncode is None:
proc.poll()
# Wait for and receive a message from the calibration tool
try:
                data, addrFrom = UDPSock.recvfrom(buf)
except socket.timeout:
wx.Yield()
else:
try:
self.robot.calibration_matrix = eval(data)
except SyntaxError:
print "ERROR: Received invalid data from calibration tool."
else:
# Update the display
wx.CallAfter(self._handler2dialog, self.handler)
break
print "Connection with calibration tool closed."
UDPSock.close()
# delete files
os.remove(proj_copy.getFilenamePrefix() + ".spec_calibtmp")
os.remove(os.path.join(proj_copy.project_root, "configs", "calibrate.config"))
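    # Assumption (illustrative only): the calibration tool replies with a single
    # UDP datagram whose payload eval()s to a 3x3 matrix in this module's
    # namespace, e.g. "array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])"; a payload that
    # raises a SyntaxError is reported and the loop above keeps waiting.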
def _handler2dialog(self, handler):
self.handler = handler
self.SetTitle("Configure %s.%s" % (handler.getType(), handler.name))
methodObj = handler.getMethodByName('__init__')
drawParamConfigPane(self.panel_configs, methodObj, self.proj)
# Add in calibration configuration pane for pose handler
if handler.h_type is ht.PoseHandler:
# Default to identity matrix
if self.robot.calibration_matrix is None:
self.robot.calibration_matrix = eye(3)
label = wx.StaticText(self.panel_configs, -1, "Calibration Matrix:")
self.sheet = wx.grid.Grid(self.panel_configs)
self.sheet.CreateGrid(3, 3)
self.sheet.SetColLabelSize(0)
self.sheet.SetRowLabelSize(0)
for x in range(0,3):
self.sheet.SetColFormatFloat(x)
for y in range(0,3):
self.sheet.SetCellValue(x, y, str(self.robot.calibration_matrix[x,y]))
button_calibrate = wx.Button(self.panel_configs, -1, "Run calibration tool...")
self.panel_configs.GetSizer().Add(label, 0, wx.ALL, 5)
self.Bind(wx.grid.EVT_GRID_CELL_CHANGE, self._onCalibEdit, self.sheet)
self.panel_configs.GetSizer().Add(self.sheet, 0, wx.EXPAND | wx.ALL, 5)
self.panel_configs.GetSizer().Add(button_calibrate, 0, wx.ALIGN_RIGHT | wx.ALL, 5)
self.Bind(wx.EVT_BUTTON, self._onClickCalibrate, button_calibrate)
# If this robot has a pre-defined calibration matrix, don't allow for calibration
if self.hsub.getRobotByType(self.robot.r_type).calibration_matrix is not None:
button_calibrate.SetLabel("Calibration is pre-defined by simulator.")
button_calibrate.Enable(False)
self.panel_configs.Layout()
# FIXME: this is a sizing hack, because I can't figure out how to get Fit() to work
a = self.panel_configs.GetSizer().GetMinSize()
b = self.GetSizer().GetMinSize()
self.SetSize((max(a[0],b[0]),a[1]+b[1]))
self.Refresh()
def onClickDefaults(self, event): # wxGlade: handlerConfigDialog.<event_handler>
print "Event handler `onClickDefaults' not implemented"
event.Skip()
# end of class handlerConfigDialog
class simSetupDialog(wx.Dialog):
def __init__(self, *args, **kwds):
# begin wxGlade: simSetupDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER | wx.THICK_FRAME
wx.Dialog.__init__(self, *args, **kwds)
self.list_box_experiment_name = wx.ListBox(self, wx.ID_ANY, choices=[])
self.button_cfg_new = wx.Button(self, wx.ID_NEW, "")
self.button_cfg_import = wx.Button(self, wx.ID_ANY, "Import...")
self.button_cfg_delete = wx.Button(self, wx.ID_DELETE, "")
self.sizer_28_staticbox = wx.StaticBox(self, wx.ID_ANY, "Experiment Configurations:")
self.label_9 = wx.StaticText(self, wx.ID_ANY, "Experiment Name: ")
self.text_ctrl_sim_experiment_name = wx.TextCtrl(self, wx.ID_ANY, "")
self.label_2 = wx.StaticText(self, wx.ID_ANY, "Custom Propositions:")
self.list_box_init_customs = wx.CheckListBox(self, wx.ID_ANY, choices=["1", "2"])
self.label_2_copy = wx.StaticText(self, wx.ID_ANY, "Action Propositions:")
self.list_box_init_actions = wx.CheckListBox(self, wx.ID_ANY, choices=["3", "4"])
self.button_edit_region_tags = wx.Button(self, wx.ID_ANY, "Edit region tags...")
self.sizer_22_staticbox = wx.StaticBox(self, wx.ID_ANY, "Initial Conditions")
self.label_1 = wx.StaticText(self, wx.ID_ANY, "Robots:")
self.list_box_robots = wx.ListBox(self, wx.ID_ANY, choices=[])
self.button_addrobot = wx.Button(self, wx.ID_ANY, "Add robot...")
self.button_2 = wx.Button(self, wx.ID_ANY, "Configure robot...")
self.button_3 = wx.Button(self, wx.ID_ANY, "Remove robot")
self.button_defaultrobot = wx.Button(self, wx.ID_ANY, "Set as Main Robot")
self.button_4 = wx.Button(self, wx.ID_ANY, "Edit proposition mapping...")
self.sizer_1_staticbox = wx.StaticBox(self, wx.ID_ANY, "Execution Environment")
self.sizer_27_staticbox = wx.StaticBox(self, wx.ID_ANY, "Experiment Settings")
self.button_sim_apply = wx.Button(self, wx.ID_APPLY, "")
self.button_sim_ok = wx.Button(self, wx.ID_OK, "")
self.button_sim_cancel = wx.Button(self, wx.ID_CANCEL, "")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_LISTBOX, self.onSimLoad, self.list_box_experiment_name)
self.Bind(wx.EVT_BUTTON, self.onConfigNew, self.button_cfg_new)
self.Bind(wx.EVT_BUTTON, self.onConfigImport, self.button_cfg_import)
self.Bind(wx.EVT_BUTTON, self.onConfigDelete, self.button_cfg_delete)
self.Bind(wx.EVT_TEXT, self.onSimNameEdit, self.text_ctrl_sim_experiment_name)
self.Bind(wx.EVT_BUTTON, self.onClickEditRegionTags, self.button_edit_region_tags)
self.Bind(wx.EVT_BUTTON, self.onClickAddRobot, self.button_addrobot)
self.Bind(wx.EVT_BUTTON, self.onClickConfigureRobot, self.button_2)
self.Bind(wx.EVT_BUTTON, self.onClickRemoveRobot, self.button_3)
self.Bind(wx.EVT_BUTTON, self.onSetMainRobot, self.button_defaultrobot)
self.Bind(wx.EVT_BUTTON, self.onClickEditMapping, self.button_4)
self.Bind(wx.EVT_BUTTON, self.onClickApply, self.button_sim_apply)
self.Bind(wx.EVT_BUTTON, self.onClickOK, self.button_sim_ok)
self.Bind(wx.EVT_BUTTON, self.onClickCancel, self.button_sim_cancel)
# end wxGlade
self.Bind(wx.EVT_CHECKLISTBOX, self.onCheckProp, self.list_box_init_customs)
self.Bind(wx.EVT_CHECKLISTBOX, self.onCheckProp, self.list_box_init_actions)
self.list_box_experiment_name.Bind(wx.EVT_LEFT_DOWN, self.onLoseFocusSimName)
self.Bind(wx.EVT_CLOSE, self.doClose)
if len(sys.argv) < 2:
print "You must specify a specification file."
print "Usage: %s [spec_file]" % sys.argv[0]
sys.exit(2)
# Load project
self.proj = project.Project()
self.proj.loadProject(sys.argv[1])
self.hsub = handlerSubsystem.HandlerSubsystem(None, self.proj.project_root)
# Set up the list of configs
self.list_box_experiment_name.Clear()
print "Loading handlers..."
self.hsub.loadAllHandlers()
print "Loading robots..."
self.hsub.loadAllRobots()
print "Loading experiment configs..."
self.hsub.loadAllConfigFiles()
for cfg in self.hsub.configs:
self.list_box_experiment_name.Append(cfg.name, cfg)
        if self.proj.current_config != "":
self.list_box_experiment_name.SetStringSelection(self.proj.current_config)
# Check for case where no config files are present
if self.list_box_experiment_name.GetCount() == 0:
# Create blank default config
cfg = ExperimentConfig()
# TODO: Check for existing untitleds and add a number at the end (steal from reged)
cfg.name = "Untitled configuration"
cfg.file_name = os.path.join(self.hsub.config_path,cfg.name.replace(' ','_'))
# since this config is not loaded, we assume it is complete
self.hsub.configs.append(cfg)
self.list_box_experiment_name.Append(cfg.name, cfg)
# By default, select the first one
if self.list_box_experiment_name.GetSelection() < 0:
self.list_box_experiment_name.SetSelection(0)
self._cfg2dialog(self._getSelectedExperimentConfig())
def __set_properties(self):
# begin wxGlade: simSetupDialog.__set_properties
self.SetTitle("Configure Execution")
self.SetSize((935, 580))
self.text_ctrl_sim_experiment_name.SetMinSize((300, 27))
self.list_box_init_customs.SetSelection(0)
self.list_box_init_actions.SetSelection(0)
# end wxGlade
def __do_layout(self):
# begin wxGlade: simSetupDialog.__do_layout
sizer_6 = wx.BoxSizer(wx.HORIZONTAL)
sizer_12 = wx.BoxSizer(wx.VERTICAL)
sizer_13 = wx.BoxSizer(wx.HORIZONTAL)
self.sizer_27_staticbox.Lower()
sizer_27 = wx.StaticBoxSizer(self.sizer_27_staticbox, wx.VERTICAL)
self.sizer_1_staticbox.Lower()
sizer_1 = wx.StaticBoxSizer(self.sizer_1_staticbox, wx.HORIZONTAL)
sizer_2 = wx.BoxSizer(wx.HORIZONTAL)
sizer_4 = wx.BoxSizer(wx.VERTICAL)
sizer_3 = wx.BoxSizer(wx.VERTICAL)
self.sizer_22_staticbox.Lower()
sizer_22 = wx.StaticBoxSizer(self.sizer_22_staticbox, wx.VERTICAL)
sizer_23 = wx.BoxSizer(wx.HORIZONTAL)
sizer_17_copy = wx.BoxSizer(wx.VERTICAL)
sizer_17 = wx.BoxSizer(wx.VERTICAL)
sizer_30 = wx.BoxSizer(wx.HORIZONTAL)
sizer_29 = wx.BoxSizer(wx.VERTICAL)
self.sizer_28_staticbox.Lower()
sizer_28 = wx.StaticBoxSizer(self.sizer_28_staticbox, wx.VERTICAL)
sizer_29_copy = wx.BoxSizer(wx.HORIZONTAL)
sizer_6.Add((20, 20), 0, 0, 0)
sizer_29.Add((20, 20), 0, 0, 0)
sizer_28.Add((20, 10), 0, 0, 0)
sizer_28.Add(self.list_box_experiment_name, 1, wx.EXPAND, 0)
sizer_28.Add((20, 20), 0, 0, 0)
sizer_29_copy.Add(self.button_cfg_new, 0, 0, 0)
sizer_29_copy.Add((10, 20), 0, 0, 0)
sizer_29_copy.Add(self.button_cfg_import, 0, 0, 0)
sizer_29_copy.Add((10, 20), 0, 0, 0)
sizer_29_copy.Add(self.button_cfg_delete, 0, 0, 0)
sizer_28.Add(sizer_29_copy, 0, wx.EXPAND, 0)
sizer_28.Add((20, 10), 0, 0, 0)
sizer_29.Add(sizer_28, 1, wx.EXPAND, 0)
sizer_6.Add(sizer_29, 1, wx.EXPAND, 0)
sizer_6.Add((20, 20), 0, 0, 0)
sizer_12.Add((20, 20), 0, 0, 0)
sizer_30.Add(self.label_9, 0, wx.ALIGN_CENTER_VERTICAL, 0)
sizer_30.Add((20, 20), 0, 0, 0)
sizer_30.Add(self.text_ctrl_sim_experiment_name, 0, 0, 0)
sizer_12.Add(sizer_30, 0, wx.EXPAND, 0)
sizer_12.Add((20, 20), 0, 0, 0)
sizer_23.Add((5, 20), 0, 0, 0)
sizer_17.Add(self.label_2, 0, 0, 0)
sizer_17.Add(self.list_box_init_customs, 1, wx.EXPAND, 0)
sizer_23.Add(sizer_17, 1, wx.EXPAND, 0)
sizer_23.Add((20, 20), 0, 0, 0)
sizer_17_copy.Add(self.label_2_copy, 0, 0, 0)
sizer_17_copy.Add(self.list_box_init_actions, 1, wx.EXPAND, 0)
sizer_23.Add(sizer_17_copy, 1, wx.EXPAND, 0)
sizer_23.Add((5, 20), 0, 0, 0)
sizer_22.Add(sizer_23, 5, wx.EXPAND, 0)
sizer_22.Add(self.button_edit_region_tags, 0, wx.LEFT | wx.TOP | wx.ALIGN_CENTER_VERTICAL, 5)
sizer_27.Add(sizer_22, 1, wx.ALL | wx.EXPAND, 10)
sizer_3.Add(self.label_1, 0, 0, 0)
sizer_3.Add(self.list_box_robots, 1, wx.EXPAND, 0)
sizer_2.Add(sizer_3, 1, wx.EXPAND, 0)
sizer_2.Add((20, 20), 0, 0, 0)
sizer_4.Add(self.button_addrobot, 0, wx.BOTTOM, 5)
sizer_4.Add(self.button_2, 0, wx.BOTTOM, 5)
sizer_4.Add(self.button_3, 0, 0, 0)
sizer_4.Add((20, 30), 0, 0, 0)
sizer_4.Add(self.button_defaultrobot, 0, wx.BOTTOM, 5)
sizer_4.Add(self.button_4, 0, 0, 0)
sizer_2.Add(sizer_4, 1, wx.EXPAND, 0)
sizer_1.Add(sizer_2, 1, wx.EXPAND, 0)
sizer_27.Add(sizer_1, 0, wx.ALL | wx.EXPAND, 10)
sizer_12.Add(sizer_27, 1, wx.EXPAND, 0)
sizer_13.Add(self.button_sim_apply, 0, 0, 0)
sizer_13.Add((10, 20), 0, 0, 0)
sizer_13.Add(self.button_sim_ok, 0, 0, 0)
sizer_13.Add((10, 20), 0, 0, 0)
sizer_13.Add(self.button_sim_cancel, 0, 0, 0)
sizer_13.Add((10, 10), 0, 0, 0)
sizer_12.Add(sizer_13, 0, wx.ALIGN_RIGHT, 0)
sizer_12.Add((20, 10), 0, 0, 0)
sizer_6.Add(sizer_12, 2, wx.EXPAND, 0)
sizer_6.Add((20, 20), 0, 0, 0)
self.SetSizer(sizer_6)
self.Layout()
self.Centre()
# end wxGlade
def doClose(self, event):
# TODO: Check for dirty?
self.Destroy()
def _cfg2dialog(self, cfg):
self.text_ctrl_sim_experiment_name.SetValue(cfg.name)
# Set up the initial actions checklist as appropriate
self.list_box_init_actions.Set([])
for i, action in enumerate(self.proj.all_actuators):
self.list_box_init_actions.Insert(action, i)
if action in cfg.initial_truths:
self.list_box_init_actions.Check(i)
# Set up the initial customs checklist as appropriate
self.list_box_init_customs.Set([])
for i, custom in enumerate(self.proj.all_customs):
self.list_box_init_customs.Insert(custom, i)
if custom in cfg.initial_truths:
self.list_box_init_customs.Check(i)
# Set up the robots list
self.list_box_robots.Set([])
for i, robot in enumerate(cfg.robots):
if robot.name == cfg.main_robot:
self.list_box_robots.Insert(robot.name + " (Main)", i, robot)
else:
self.list_box_robots.Insert(robot.name, i, robot)
if len(cfg.robots) > 0:
self.list_box_robots.Select(0)
def onLoseFocusSimName(self, event):
if len(self.text_ctrl_sim_experiment_name.GetValue().strip()) == 0:
d = wx.MessageDialog(self, "Current experiment config needs a name. Please add one.", style = wx.OK | wx.ICON_ERROR)
d.ShowModal()
event.Skip(False)
return
if [c.name.strip() for c in self.hsub.configs].count(self.text_ctrl_sim_experiment_name.GetValue().strip()) > 1:
d = wx.MessageDialog(self, "Current experiment config has the same name with another config. Please change it.", style = wx.OK | wx.ICON_ERROR)
d.ShowModal()
event.Skip(False)
return
event.Skip()
def onSimLoad(self, event): # wxGlade: simSetupDialog.<event_handler>
cfg = event.GetClientData()
if cfg is not None:
self._cfg2dialog(cfg)
event.Skip()
def onConfigNew(self, event): # wxGlade: simSetupDialog.<event_handler>
# Create blank default config
cfg = ExperimentConfig()
# TODO: Check for existing untitleds and add a number at the end (steal from reged)
cfg.name = "Untitled configuration"
cfg.name = self._normalizeConfigName(cfg.name)
cfg.file_name = os.path.join(self.hsub.config_path, cfg.name.replace(' ','_'))
# since this config is not loaded, we assume it is complete
self.hsub.configs.append(cfg)
self.list_box_experiment_name.Append(cfg.name, cfg)
self.list_box_experiment_name.Select(self.list_box_experiment_name.GetCount()-1)
self._cfg2dialog(cfg)
event.Skip()
def _normalizeConfigName(self, name):
""" Make sure the config name is not taken already"""
# Make sure another config doesn't already have this name
while name in (r.name for r in self.hsub.configs):
name = name + " copy"
return name
def onConfigImport(self, event): # wxGlade: simSetupDialog.<event_handler>
file_name = wx.FileSelector("Import Config File", default_extension="config",
wildcard="Experiment config files (*.config)|*.config",
flags = wx.OPEN | wx.FILE_MUST_EXIST)
if file_name == "": return
# import the config file
cfg = ExperimentConfig()
cfg.fromFile(file_name, self.hsub)
cfg.name = self._normalizeConfigName(cfg.name)
self.hsub.configs.append(cfg)
self.list_box_experiment_name.Append(cfg.name, cfg)
self.list_box_experiment_name.Select(self.list_box_experiment_name.GetCount()-1)
self._cfg2dialog(cfg)
event.Skip()
def onConfigDelete(self, event): # wxGlade: simSetupDialog.<event_handler>
if self.list_box_experiment_name.GetSelection() == -1:
return
numel = self.list_box_experiment_name.GetCount()
if numel > 1: # don't allow deletion of final remaining element
# TODO: gray out button when no action possible
pos = self.list_box_experiment_name.GetSelection()
self.list_box_experiment_name.Delete(pos)
self.hsub.configs.pop(pos)
if pos == numel - 1:
# If the very last element was deleted, move the selection up one
newpos = pos - 1
else:
newpos = pos
self.list_box_experiment_name.Select(newpos)
self._cfg2dialog(self.list_box_experiment_name.GetClientData(newpos))
event.Skip()
def onSimNameEdit(self, event): # wxGlade: simSetupDialog.<event_handler>
pos = self.list_box_experiment_name.GetSelection()
self.list_box_experiment_name.GetClientData(pos).name = event.GetString().strip()
self.list_box_experiment_name.SetString(pos, event.GetString().strip())
event.Skip()
def onClickAddRobot(self, event): # wxGlade: simSetupDialog.<event_handler>
dlg = addRobotDialog(self, None, -1, "")
if dlg.ShowModal() != wx.ID_CANCEL:
obj = self._getSelectedExperimentConfig()
obj.robots += [dlg.robot]
if obj.main_robot == '':
obj.main_robot = dlg.robot.name
self._cfg2dialog(obj)
dlg.Destroy()
event.Skip()
def onClickConfigureRobot(self, event): # wxGlade: simSetupDialog.<event_handler>
# TODO: gray out button when no action possible
if self.list_box_robots.GetSelection() == -1:
return
dlg = addRobotDialog(self, None, -1, "")
pos = self.list_box_robots.GetSelection()
r = self.list_box_robots.GetClientData(pos)
dlg._robot2dialog(deepcopy(r), original=True)
if dlg.ShowModal() != wx.ID_CANCEL:
obj = self._getSelectedExperimentConfig()
# Update the name of the main robot if necessary
if obj.main_robot == obj.robots[pos].name:
obj.main_robot = dlg.robot.name
# Update any propmappings with new name, if necessary
for k,v in obj.prop_mapping.iteritems():
obj.prop_mapping[k] = re.sub("^"+r.name+"\.", dlg.robot.name+".", v)
obj.robots[pos] = dlg.robot
self._cfg2dialog(obj)
dlg.Destroy()
event.Skip()
def onClickRemoveRobot(self, event): # wxGlade: simSetupDialog.<event_handler>
if self.list_box_robots.GetSelection() == -1:
return
numel = self.list_box_robots.GetCount()
obj = self._getSelectedExperimentConfig()
# TODO: gray out button when no action possible
if numel > 0:
pos = self.list_box_robots.GetSelection()
# Clear the main_robot string if we're deleting that robot
if obj.main_robot == obj.robots[pos].name:
obj.main_robot = ''
obj.robots.pop(pos)
self._cfg2dialog(obj)
if pos == numel - 1:
# If the very last element was deleted, move the selection up one
                newpos = pos - 1
else:
newpos = pos
if pos != -1:
self.list_box_robots.Select(newpos)
event.Skip()
def onClickEditMapping(self, event): # wxGlade: simSetupDialog.<event_handler>
dlg = propMappingDialog(self, None, -1, "")
obj = self._getSelectedExperimentConfig()
dlg._mapping2dialog(deepcopy(obj.prop_mapping))
if dlg.ShowModal() != wx.ID_CANCEL:
obj.prop_mapping = dlg.mapping
dlg.Destroy()
event.Skip()
def onClickApply(self, event): # wxGlade: simSetupDialog.<event_handler>
# Get the current experiment config
self.proj.current_config = self._getSelectedExperimentConfig().name
self.hsub.setExecutingConfig(self.proj.current_config)
if len(self.hsub.executing_config.robots) == 0:
d = wx.MessageDialog(self, "There is no robot in the current experiment config. Please add one before saving.", style = wx.OK | wx.ICON_ERROR)
d.ShowModal()
event.Skip(False)
return
if len(self.hsub.executing_config.name) == 0:
d = wx.MessageDialog(self, "Current experiment config needs a name. Please add one before saving.", style = wx.OK | wx.ICON_ERROR)
d.ShowModal()
event.Skip(False)
return
if [c.name.strip() for c in self.hsub.configs].count(self.text_ctrl_sim_experiment_name.GetValue().strip()) > 1:
d = wx.MessageDialog(self, "Current experiment config has the same name with another config. Please change it.", style = wx.OK | wx.ICON_ERROR)
d.ShowModal()
event.Skip(False)
return
# clean up prop_mapping of the current executing config
default_prop_mapping = self.hsub.getDefaultPropMapping(self.proj.all_sensors, self.proj.all_actuators)
self.hsub.executing_config.normalizePropMapping(default_prop_mapping)
# Save the config files
self.hsub.saveAllConfigFiles()
# Save the name of the currently active config in the spec file
self.proj.writeSpecFile()
event.Skip()
def onClickOK(self, event): # wxGlade: simSetupDialog.<event_handler>
self.onClickApply(event)
# Clean up
if event.GetSkipped():
self.doClose(event)
event.Skip()
def _getSelectedExperimentConfig(self):
pos = self.list_box_experiment_name.GetSelection()
obj = self.list_box_experiment_name.GetClientData(pos)
return obj
def onCheckProp(self, event): # wxGlade: simSetupDialog.<event_handler>
obj = event.GetEventObject()
i = event.GetInt()
newstate = obj.IsChecked(i)
name = obj.GetString(i)
obj = self._getSelectedExperimentConfig()
if newstate == True:
obj.initial_truths += [name]
else:
obj.initial_truths.remove(name)
event.Skip()
def onSetMainRobot(self, event): # wxGlade: simSetupDialog.<event_handler>
pos = self.list_box_robots.GetSelection()
obj = self.list_box_robots.GetClientData(pos)
if obj is None:
return
self._getSelectedExperimentConfig().main_robot = obj.name
self._cfg2dialog(self._getSelectedExperimentConfig())
self.list_box_robots.SetSelection(pos)
event.Skip()
def onClickCancel(self, event): # wxGlade: simSetupDialog.<event_handler>
# Clean up
self.doClose(event)
event.Skip()
def onClickEditRegionTags(self, event): # wxGlade: simSetupDialog.<event_handler>
dlg = regionTagsDialog(self, None, -1, "")
obj = self._getSelectedExperimentConfig()
dlg._tags2dialog(deepcopy(obj.region_tags))
if dlg.ShowModal() != wx.ID_CANCEL:
obj.region_tags = dlg.tags
dlg.Destroy()
event.Skip()
# end of class simSetupDialog
class addRobotDialog(wx.Dialog):
def __init__(self, parent, *args, **kwds):
# begin wxGlade: addRobotDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE
wx.Dialog.__init__(self, *args, **kwds)
self.label_3 = wx.StaticText(self, wx.ID_ANY, "Robot type:")
self.combo_box_robottype = wx.ComboBox(self, wx.ID_ANY, choices=[], style=wx.CB_DROPDOWN | wx.CB_READONLY)
self.label_4 = wx.StaticText(self, wx.ID_ANY, "Robot name:")
self.text_ctrl_robotname = wx.TextCtrl(self, wx.ID_ANY, "")
self.static_line_1 = wx.StaticLine(self, wx.ID_ANY)
self.button_7 = wx.Button(self, wx.ID_CANCEL, "")
self.button_6 = wx.Button(self, wx.ID_OK, "")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_COMBOBOX, self.onChooseRobot, self.combo_box_robottype)
self.Bind(wx.EVT_TEXT, self.onEditRobotName, self.text_ctrl_robotname)
self.Bind(wx.EVT_BUTTON, self.onClickOK, self.button_6)
# end wxGlade
self.parent = parent
self.hsub = parent.hsub
self.proj = parent.proj
self.robot = RobotConfig()
self.original_robot = RobotConfig()
self.handler_labels = {}
self.handler_combos = {}
self.handler_buttons = {}
for handler_type_name in ht.getAllHandlerTypeName():
handler_type_class = ht.getHandlerTypeClass(handler_type_name)
self.handler_labels[handler_type_class] = wx.StaticText(self, -1, "%s handler:" % handler_type_name)
self.handler_combos[handler_type_class] = wx.ComboBox(self, -1, choices=[], style=wx.CB_DROPDOWN|wx.CB_READONLY)
self.handler_buttons[handler_type_class] = wx.Button(self, -1, "Configure...")
self.sizer_9.Add(self.handler_labels[handler_type_class], 0, wx.ALL|wx.ALIGN_RIGHT, 0)
self.sizer_9.Add(self.handler_combos[handler_type_class], 1, wx.ALL|wx.EXPAND, 0)
self.sizer_9.Add(self.handler_buttons[handler_type_class], 0, wx.ALL, 0)
self.Bind(wx.EVT_BUTTON, self.onClickConfigure, self.handler_buttons[handler_type_class])
self.Bind(wx.EVT_COMBOBOX, self.onChangeHandler, self.handler_combos[handler_type_class])
self.Layout()
self.SetSizeHints(self.GetSize()[0], 0) # Force width to stay the same
self.Fit()
# Set up the list of robot types
self.combo_box_robottype.Clear()
for r in self.parent.hsub.robot_configs:
            self.combo_box_robottype.Append(r.r_type + (" (Not successfully loaded)" if not r.successfully_loaded else ""))
def _populateHandlerCombos(self):
# Populate based on current robot type
for handler_type_class in ht.getAllHandlerTypeClass():
self.handler_combos[handler_type_class].Clear()
self.handler_combos[handler_type_class].SetValue("")
self.handler_buttons[handler_type_class].Enable(False)
# Load handlers under this robot
if handler_type_class in self.parent.hsub.handler_configs[self.robot.r_type]:
for handler_config in self.parent.hsub.handler_configs[self.robot.r_type][handler_type_class]:
self.handler_combos[handler_type_class].Insert(handler_config.name, 0, handler_config)
# Load handlers under shared folder for pose, motionControl, drive
if handler_type_class in self.parent.hsub.handler_configs['share'] and \
handler_type_class in [ht.PoseHandler, ht.MotionControlHandler, ht.DriveHandler]:
for handler_config in self.parent.hsub.handler_configs['share'][handler_type_class]:
self.handler_combos[handler_type_class].Insert(handler_config.name, 0, handler_config)
def __set_properties(self):
# begin wxGlade: addRobotDialog.__set_properties
self.SetTitle("Add/Configure Robot")
self.SetSize((637, 410))
# end wxGlade
def __do_layout(self):
# begin wxGlade: addRobotDialog.__do_layout
sizer_5 = wx.BoxSizer(wx.VERTICAL)
sizer_11 = wx.BoxSizer(wx.HORIZONTAL)
sizer_9 = wx.FlexGridSizer(0, 3, 2, 7)
sizer_8 = wx.BoxSizer(wx.HORIZONTAL)
sizer_7 = wx.BoxSizer(wx.HORIZONTAL)
sizer_7.Add(self.label_3, 0, wx.ALL, 5)
sizer_7.Add(self.combo_box_robottype, 1, wx.ALL, 5)
sizer_5.Add(sizer_7, 0, wx.EXPAND, 0)
sizer_8.Add(self.label_4, 0, wx.ALL, 5)
sizer_8.Add(self.text_ctrl_robotname, 1, wx.ALL, 5)
sizer_5.Add(sizer_8, 0, wx.EXPAND, 0)
sizer_5.Add(self.static_line_1, 0, wx.EXPAND, 0)
sizer_9.AddGrowableCol(1)
sizer_5.Add(sizer_9, 1, wx.ALL | wx.EXPAND, 10)
sizer_5.Add((20, 5), 0, wx.EXPAND, 0)
sizer_11.Add((20, 20), 1, wx.EXPAND, 0)
sizer_11.Add(self.button_7, 0, wx.ALL, 5)
sizer_11.Add(self.button_6, 0, wx.ALL, 5)
sizer_5.Add(sizer_11, 0, wx.EXPAND, 0)
self.SetSizer(sizer_5)
self.Layout()
# end wxGlade
self.sizer_9 = sizer_9
def _robot2dialog(self, robot, original=False):
"""
Update the GUI based on a robot object.
If `original` is True, save a reference to allow for reversion to defaults.
"""
self.robot = robot
if original:
self.original_robot = deepcopy(robot)
self.combo_box_robottype.SetStringSelection(self.robot.r_type + (" (Not successfully loaded)" if not self.robot.successfully_loaded else ""))
self.text_ctrl_robotname.SetValue(self.robot.name)
self._populateHandlerCombos()
for handler_type_class, handler_config in self.robot.handlers.iteritems():
# for each handler type, a robot can only have one handler config
self.handler_combos[handler_type_class].SetValue("")
self.handler_combos[handler_type_class].SetStringSelection(handler_config.name)
# Disable the "Configure" button if there are no parameters (with an exception for pose)
if len(handler_config.getMethodByName("__init__").para) == 0 and \
handler_config.h_type is not ht.PoseHandler:
self.handler_buttons[handler_type_class].Enable(False)
else:
self.handler_buttons[handler_type_class].Enable(True)
if self.handler_combos[handler_type_class].GetStringSelection() == "":
# when neither the robot or the share folder has the handler loaded
                logging.warning('Cannot find any handler config in the options for handler type {!r}'\
.format(handler_type_class))
self.handler_buttons[handler_type_class].Enable(False)
def onClickConfigure(self, event):
src = event.GetEventObject()
# Figure out which "Configure..." button was pressed
for htype, b in self.handler_buttons.iteritems():
if src is b:
# TODO: gray out button when no action possible
if self.handler_combos[htype].GetValue() == "":
return
dlg = handlerConfigDialog(self, None, -1, "")
# Edit existing handler object
dlg._handler2dialog(deepcopy(self.robot.handlers[htype]))
if dlg.ShowModal() != wx.ID_CANCEL:
self.robot.handlers[htype] = dlg.handler
#self._robot2dialog(self.robot)
dlg.Destroy()
break
event.Skip()
def onChangeHandler(self, event):
src = event.GetEventObject()
# Figure out which handler was changed
for htype, b in self.handler_combos.iteritems():
if src is b:
hname = src.GetValue()
# If this handler has default values from the selected robot file, use them
# TODO: this will erase any previous config settings...
default_robot = self.parent.hsub.getRobotByType(self.robot.r_type)
handler_config_changed = default_robot.getHandlerOfRobot(htype)
if handler_config_changed.name != hname:
handler_config_changed = None
if handler_config_changed is None:
# just grab the plain handler
rname = self.robot.r_type
handler_config_changed = self.parent.hsub.getHandlerConfigDefault(rname, htype, hname)
if handler_config_changed is None:
# this handler might be a shared one
rname = 'share'
handler_config_changed = self.parent.hsub.getHandlerConfigDefault(rname, htype, hname)
if handler_config_changed is not None:
self.robot.handlers[htype] = handler_config_changed
else:
logging.warning('Cannot find the selected handler config.')
break
self._robot2dialog(self.robot)
event.Skip()
def _normalizeRobotName(self, name):
""" Clean a robot name and make sure it's not taken already"""
# Disallow empty names, because that would be super confusing
if name is None or name == "":
raise ValueError("Your robot needs a name!")
# Replace spaces and non-alphanums with underscores
name = re.sub(r"\W", "_", name.strip())
# Make sure another robot doesn't already have this name
if name != self.original_robot.name and \
name in (r.name for r in self.parent._getSelectedExperimentConfig().robots):
raise ValueError('Current configuration already contains a robot with name "{}".\n\nPlease rename.'.format(name))
return name
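    # Illustrative sketch (values made up) of what _normalizeRobotName does:
    #   "  my robot #1 "  ->  "my_robot__1"    (stripped, non-alphanumerics replaced with "_")
    # A ValueError is raised if the cleaned name collides with another robot in the
    # currently selected experiment configuration.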
def onClickOK(self, event): # wxGlade: addRobotDialog.<event_handler>
# TODO: add in checks for all combo boxes (don't allow null handlers)
# Make sure that all required handler parameters have been specified
incomplete_params = []
for h_type, handler in self.robot.handlers.iteritems():
for param in handler.getMethodByName("__init__").para:
if param.getValue() is None:
incomplete_params.append((handler.name, param.name))
if len(incomplete_params) > 0:
wx.MessageBox("The following parameters need to be specified:\n" + \
"\n".join([" - {}.{}".format(hn, pn) for hn, pn in incomplete_params]),
"Error", style = wx.OK | wx.ICON_ERROR)
event.Skip(False)
return
# Make sure the robot name is OK
try:
self.robot.name = self._normalizeRobotName(self.robot.name)
except ValueError as e:
wx.MessageBox(e.message, "Error", style = wx.OK | wx.ICON_ERROR)
event.Skip(False)
return
event.Skip()
def onChooseRobot(self, event): # wxGlade: addRobotDialog.<event_handler>
# Strip the trailing note
robot_type = event.GetEventObject().GetValue().replace(" (Not successfully loaded)", "")
self.robot = deepcopy(self.parent.hsub.getRobotByType(robot_type))
self._robot2dialog(self.robot)
event.Skip()
def onEditRobotName(self, event): # wxGlade: addRobotDialog.<event_handler>
self.robot.name = event.GetString()
event.Skip()
# end of class addRobotDialog
class propMappingDialog(wx.Dialog):
def __init__(self, parent, *args, **kwds):
# begin wxGlade: propMappingDialog.__init__
kwds["style"] = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER | wx.THICK_FRAME
wx.Dialog.__init__(self, *args, **kwds)
self.label_6 = wx.StaticText(self, wx.ID_ANY, "Propositions:")
self.list_box_props = wx.ListBox(self, wx.ID_ANY, choices=[], style=wx.LB_SINGLE | wx.LB_ALWAYS_SB)
self.label_11 = wx.StaticText(self, wx.ID_ANY, "Continuous controller mapping:")
self.text_ctrl_mapping = wx.richtext.RichTextCtrl(self, wx.ID_ANY, "")
self.button_9 = wx.Button(self, wx.ID_ANY, " ^\nInsert/Apply")
self.label_7 = wx.StaticText(self, wx.ID_ANY, "Robots:")
self.list_box_robots = wx.ListBox(self, wx.ID_ANY, choices=[])
self.label_8 = wx.StaticText(self, wx.ID_ANY, "Sensors/Actuators:")
self.list_box_functions = wx.ListBox(self, wx.ID_ANY, choices=[])
self.label_10 = wx.StaticText(self, wx.ID_ANY, "Parameters:")
self.panel_method_cfg = wx.ScrolledWindow(self, wx.ID_ANY, style=wx.SUNKEN_BORDER | wx.TAB_TRAVERSAL)
self.button_11 = wx.Button(self, wx.ID_OK, "")
self.button_10 = wx.Button(self, wx.ID_CANCEL, "")
self.__set_properties()
self.__do_layout()
self.Bind(wx.EVT_LISTBOX, self.onSelectProp, self.list_box_props)
self.Bind(wx.EVT_TEXT, self.onEditMapping, self.text_ctrl_mapping)
self.Bind(wx.EVT_BUTTON, self.onClickApply, self.button_9)
self.Bind(wx.EVT_LISTBOX, self.onSelectRobot, self.list_box_robots)
self.Bind(wx.EVT_LISTBOX, self.onSelectHandler, self.list_box_functions)
self.Bind(wx.EVT_BUTTON, self.onClickOK, self.button_10)
# end wxGlade
self.text_ctrl_mapping.Bind(wx.EVT_TEXT, self.onEditMapping)
self.text_ctrl_mapping.Bind(wx.EVT_LEFT_UP, self.onClickMapping)
#self.Bind(wx.EVT_LEFT_UP, self.onClickMapping, self.text_ctrl_mapping)
#self.text_ctrl_mapping.Bind(wx.EVT_LEFT_DOWN, self.onClickMapping)
self.text_ctrl_mapping.Bind(wx.EVT_KEY_UP, self.onClickMapping)
self.text_ctrl_mapping.Bind(wx.EVT_KEY_DOWN, self.onClickMapping)
self.proj = parent.proj
self.hsub = parent.hsub
self.robots = parent._getSelectedExperimentConfig().robots
# Set up the list of robots
for i, r in enumerate(self.robots):
self.list_box_robots.Insert("%s (%s)" % (r.name, r.r_type), i, r)
self.list_box_robots.Append("(Simulated)")
self.list_box_robots.SetSelection(0)
# Set up the list of props
self.list_box_props.Clear()
self.list_box_props.Append("=== Sensors ===")
#self.list_box_props.SetItemFont(n, wx.Font(10, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, ""))
#self.list_box_props.SetItemBackgroundColour(n, wx.Color(100,100,100))
for p in self.proj.all_sensors:
self.list_box_props.Append(p)
self.list_box_props.Append("")
self.list_box_props.Append("=== Actuators ===")
for p in self.proj.all_actuators:
self.list_box_props.Append(p)
self.mapping = None
self.tempMethod = None
self.list_box_props.SetSelection(0)
self.onSelectProp(None)
def _mapping2dialog(self, mapping):
self.mapping = mapping
# Set defaults as necessary
for p in self.proj.all_sensors:
if p not in mapping or self.mapping[p].strip() == "":
m = deepcopy(self.hsub.handler_configs["share"][ht.SensorHandler][0].getMethodByName("buttonPress"))
para = m.getParaByName("button_name")
para.setValue(p)
self.mapping[p] = self.hsub.method2String(m, "share")
for p in self.proj.all_actuators:
if p not in mapping or self.mapping[p].strip() == "":
m = deepcopy(self.hsub.handler_configs["share"][ht.ActuatorHandler][0].getMethodByName("setActuator"))
para = m.getParaByName("name")
para.setValue(p)
self.mapping[p] = self.hsub.method2String(m, "share")
def __set_properties(self):
# begin wxGlade: propMappingDialog.__set_properties
self.SetTitle("Proposition Mapping")
self.SetSize((981, 419))
self.panel_method_cfg.SetScrollRate(10, 10)
# end wxGlade
def __do_layout(self):
# begin wxGlade: propMappingDialog.__do_layout
sizer_14 = wx.BoxSizer(wx.HORIZONTAL)
sizer_16 = wx.BoxSizer(wx.VERTICAL)
sizer_25 = wx.BoxSizer(wx.HORIZONTAL)
sizer_19 = wx.BoxSizer(wx.HORIZONTAL)
sizer_24 = wx.BoxSizer(wx.VERTICAL)
sizer_21 = wx.BoxSizer(wx.VERTICAL)
sizer_20 = wx.BoxSizer(wx.VERTICAL)
sizer_18 = wx.BoxSizer(wx.HORIZONTAL)
sizer_15 = wx.BoxSizer(wx.VERTICAL)
sizer_15.Add(self.label_6, 0, wx.LEFT | wx.RIGHT | wx.TOP, 5)
sizer_15.Add(self.list_box_props, 1, wx.ALL | wx.EXPAND, 5)
sizer_14.Add(sizer_15, 1, wx.EXPAND, 0)
sizer_16.Add(self.label_11, 0, wx.ALL, 5)
sizer_16.Add(self.text_ctrl_mapping, 1, wx.ALL | wx.EXPAND, 5)
sizer_18.Add((20, 20), 1, wx.EXPAND, 0)
sizer_18.Add(self.button_9, 0, wx.ALL, 5)
sizer_18.Add((20, 20), 1, wx.EXPAND, 0)
sizer_16.Add(sizer_18, 0, wx.EXPAND, 0)
sizer_20.Add(self.label_7, 0, wx.ALL, 5)
sizer_20.Add(self.list_box_robots, 1, wx.ALL | wx.EXPAND, 5)
sizer_19.Add(sizer_20, 1, wx.EXPAND, 0)
sizer_21.Add(self.label_8, 0, wx.ALL, 5)
sizer_21.Add(self.list_box_functions, 1, wx.ALL | wx.EXPAND, 5)
sizer_19.Add(sizer_21, 1, wx.EXPAND, 0)
sizer_24.Add(self.label_10, 0, wx.ALL, 5)
sizer_24.Add(self.panel_method_cfg, 1, wx.ALL | wx.EXPAND, 5)
sizer_19.Add(sizer_24, 3, wx.EXPAND, 0)
sizer_16.Add(sizer_19, 5, wx.EXPAND, 0)
sizer_25.Add((20, 20), 1, wx.EXPAND, 0)
sizer_25.Add(self.button_11, 0, wx.ALL, 5)
sizer_25.Add(self.button_10, 0, wx.ALL, 5)
sizer_16.Add(sizer_25, 0, wx.EXPAND, 0)
sizer_14.Add(sizer_16, 4, wx.EXPAND, 0)
self.SetSizer(sizer_14)
self.Layout()
# end wxGlade
def onSelectProp(self, event): # wxGlade: propMappingDialog.<event_handler>
# If you've selected a header, not a proposition, then gray out the edit box
if self.list_box_props.GetStringSelection().startswith("===") or self.list_box_props.GetStringSelection() == "":
self.text_ctrl_mapping.Enable(False)
self.text_ctrl_mapping.SetValue("")
self.list_box_robots.Enable(False)
self.list_box_functions.Clear()
self.list_box_functions.Enable(False)
else:
self.text_ctrl_mapping.Enable(True)
self.list_box_robots.Enable(True)
self.list_box_functions.Enable(True)
self.onSelectRobot(None)
if event.GetString() in self.mapping:
self.text_ctrl_mapping.SetValue(self.mapping[event.GetString()])
else:
self.text_ctrl_mapping.SetValue("")
# Auto-select the first term
self.onClickMapping(None)
if event is not None:
event.Skip()
def onClickApply(self, event): # wxGlade: propMappingDialog.<event_handler>
if self.tempMethod is not None:
#for p in self.tempMethod.para:
# print p.name, p.value
rname = self.list_box_robots.GetStringSelection().split(" ")[0]
if rname == "(Simulated)":
rname = "share"
method_string = self.hsub.method2String(self.tempMethod, rname)
if method_string is None:
print "ERROR: Method cannot be mapped to string"
else:
start, end = self.text_ctrl_mapping.GetSelection()
if start < 0:
# If nothing is selected, just insert
start = self.text_ctrl_mapping.GetInsertionPoint()
end = start
self.text_ctrl_mapping.Replace(start, end, method_string)
self.text_ctrl_mapping.SetSelection(start, start + len(method_string))
event.Skip()
def onSelectRobot(self, event): # wxGlade: propMappingDialog.<event_handler>
# Populate list of functions
self.list_box_functions.Clear()
pos = self.list_box_robots.GetSelection()
r = self.list_box_robots.GetClientData(pos)
# Only show sensors for sensor props, and actuators for actuator props
if self.list_box_props.GetStringSelection() in self.proj.all_sensors:
if self.list_box_robots.GetStringSelection() == "(Simulated)":
# TODO: might there be more than one type of handler in share?
methods = self.hsub.handler_configs["share"][ht.SensorHandler][0].methods
else:
methods = getattr(r.getHandlerOfRobot(ht.SensorHandler), 'methods', [])
elif self.list_box_props.GetStringSelection() in self.proj.all_actuators:
if self.list_box_robots.GetStringSelection() == "(Simulated)":
# TODO: might there be more than one type of handler in share?
methods = self.hsub.handler_configs["share"][ht.ActuatorHandler][0].methods
else:
methods = getattr(r.getHandlerOfRobot(ht.ActuatorHandler), 'methods', [])
else:
print ("WARNING: Selected proposition '%s' that is neither sensor nor actuator. " +
"This should be impossible.") % (self.list_box_props.GetStringSelection())
for i, m in enumerate([m for m in methods if not m.name.startswith("_")]):
self.list_box_functions.Insert("%s" % (m.name), i, m)
if event is not None:
event.Skip()
def onSelectHandler(self, event): # wxGlade: propMappingDialog.<event_handler>
if event is not None:
event.Skip()
pos = self.list_box_functions.GetSelection()
if pos < 0:
if self.panel_method_cfg.GetSizer() is not None:
self.panel_method_cfg.GetSizer().Clear(deleteWindows=True)
return
m = self.list_box_functions.GetClientData(pos)
self.tempMethod = deepcopy(m)
drawParamConfigPane(self.panel_method_cfg, self.tempMethod, self.proj)
self.Layout()
def onClickOK(self, event): # wxGlade: propMappingDialog.<event_handler>
#print "Event handler `onClickOK' not implemented!"
event.Skip()
def onClickMapping(self, event):
if event is not None:
event.Skip()
        if event is not None and \
           event.GetEventType() in [wx.wxEVT_KEY_DOWN, wx.wxEVT_KEY_UP] and \
           event.GetKeyCode() not in [wx.WXK_LEFT, wx.WXK_RIGHT, wx.WXK_UP, wx.WXK_DOWN, wx.WXK_HOME, wx.WXK_END,
                                      wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_RIGHT, wx.WXK_NUMPAD_UP, wx.WXK_NUMPAD_DOWN]:
                                    # wx.WXK_BACK, wx.WXK_DELETE]:
            return
# TODO: Make backspace work as expected; maybe colorize/bold
i = self.text_ctrl_mapping.GetInsertionPoint()
# Special case for beginning or end of field
if i == 0 or i == self.text_ctrl_mapping.GetLastPosition():
self.text_ctrl_mapping.SelectNone()
return
else:
# Select first term
i = 1
s = self.text_ctrl_mapping.GetValue()
# Don't bother going any further if it's blank
if s.strip() == "":
return
start, end = self.text_ctrl_mapping.GetSelection()
if start >= 0:
# If something is selected, check to make sure neither side is inside a methodstring
check_pts = [start, end]
else:
# Otherwise just make sure the insertion point hasn't moved inside a methodstring
check_pts = [i]
try:
cds, _ = parseCallString(s, mode="sensor") # Sensor mode is more lenient than actuator
except SyntaxError:
# If there was a parsing error, it's not a proper methodstring anyways
return
cd_local = None
for cd in cds:
if any([i > cd.start_pos and i < cd.end_pos for i in check_pts]):
cd_local = cd
break
if cd_local is None:
return
# Make sure the name is the correct length
if len(cd_local.name) != 3:
return
# Make sure the robot name is valid
rname = cd_local.name[0]
if rname == "share":
rname = "(Simulated)"
corresponding_robots = [n for n in self.list_box_robots.GetItems() if n.startswith(rname)]
if len(corresponding_robots) != 1:
print "WARNING: No unique robot corresponding to name '%s'." % m.group("robot_name")
return
# Force selection of the entire keyword, and place insertion caret as appropriate
self.text_ctrl_mapping.SetSelection(cd_local.start_pos, cd_local.end_pos)
if event is not None:
if event.GetEventType() in [wx.wxEVT_KEY_DOWN, wx.wxEVT_KEY_UP]:
if event.GetKeyCode() in [wx.WXK_LEFT, wx.WXK_HOME, wx.WXK_UP, wx.WXK_NUMPAD_LEFT, wx.WXK_NUMPAD_UP]:
self.text_ctrl_mapping.MoveCaret(cd_local.start_pos-1)
elif event.GetKeyCode() in [wx.WXK_RIGHT, wx.WXK_END, wx.WXK_DOWN, wx.WXK_NUMPAD_RIGHT, wx.WXK_NUMPAD_DOWN]:
self.text_ctrl_mapping.MoveCaret(cd_local.end_pos-1)
# Load detailed view of keyword below
self.list_box_robots.SetStringSelection(corresponding_robots[0])
self.onSelectRobot(None)
self.list_box_functions.SetStringSelection(cd_local.name[2])
self.tempMethod = self.hsub.string2Method(s[cd_local.start_pos:cd_local.end_pos], self.robots)
drawParamConfigPane(self.panel_method_cfg, self.tempMethod, self.proj)
self.Layout()
def onEditMapping(self, event): # wxGlade: propMappingDialog.<event_handler>
if not self.text_ctrl_mapping.IsEnabled():
return
prop_name = self.list_box_props.GetStringSelection()
self.mapping[prop_name] = self.text_ctrl_mapping.GetValue()
event.Skip()
# end of class propMappingDialog
if __name__ == "__main__":
SimConfigEditor = wx.PySimpleApp(0)
wx.InitAllImageHandlers()
SimSetupDialog = simSetupDialog(None, -1, "")
SimConfigEditor.SetTopWindow(SimSetupDialog)
SimSetupDialog.Show()
SimConfigEditor.MainLoop()
| VerifiableRobotics/LTLMoP | src/lib/configEditor.py | Python | gpl-3.0 | 64,636 | 0.003992 |
#!/usr/bin/env python
"""
Install.py tool to do a generic build of a library
soft linked to by many of the lib/Install.py files
used to automate the steps described in the corresponding lib/README
"""
from __future__ import print_function
import sys, os, subprocess
from argparse import ArgumentParser
sys.path.append('..')
from install_helpers import get_cpus, fullpath
parser = ArgumentParser(prog='Install.py',
description="LAMMPS library build wrapper script")
HELP = """
Syntax from src dir: make lib-libname args="-m machine -e suffix"
Syntax from lib dir: python Install.py -m machine -e suffix
libname = name of lib dir (e.g. atc, h5md, meam, poems, etc)
specify -m and optionally -e, order does not matter
Examples:
make lib-poems args="-m serial" # build POEMS lib with same settings as in the serial Makefile in src
make lib-colvars args="-m mpi" # build COLVARS lib with same settings as in the mpi Makefile in src
make lib-meam args="-m ifort" # build MEAM lib with custom Makefile.ifort (using Intel Fortran)
"""
# parse and process arguments
parser.add_argument("-m", "--machine",
help="suffix of a <libname>/Makefile.* file used for compiling this library")
parser.add_argument("-e", "--extramake",
help="set EXTRAMAKE variable in <libname>/Makefile.<machine> to Makefile.lammps.<extramake>")
args = parser.parse_args()
# print help message and exit, if neither build nor path options are given
if not args.machine and not args.extramake:
parser.print_help()
sys.exit(HELP)
machine = args.machine
extraflag = args.extramake
if extraflag:
suffix = args.extramake
else:
suffix = 'empty'
# set lib from working dir
cwd = fullpath('.')
lib = os.path.basename(cwd)
# create Makefile.auto as copy of Makefile.machine
# reset EXTRAMAKE if requested
if not os.path.exists("Makefile.%s" % machine):
sys.exit("lib/%s/Makefile.%s does not exist" % (lib, machine))
lines = open("Makefile.%s" % machine, 'r').readlines()
fp = open("Makefile.auto", 'w')
has_extramake = False
for line in lines:
words = line.split()
if len(words) == 3 and words[0] == "EXTRAMAKE" and words[1] == '=':
has_extramake = True
if extraflag:
line = line.replace(words[2], "Makefile.lammps.%s" % suffix)
fp.write(line)
fp.close()
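# For illustration (hypothetical Makefile content): with "-e ifort", a line in
# Makefile.<machine> such as
#   EXTRAMAKE = Makefile.lammps.empty
# is written to Makefile.auto as
#   EXTRAMAKE = Makefile.lammps.ifort
# while all other lines are copied through unchanged.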
# make the library via Makefile.auto optionally with parallel make
n_cpus = get_cpus()
print("Building lib%s.a ..." % lib)
cmd = "make -f Makefile.auto clean; make -f Makefile.auto -j%d" % n_cpus
try:
txt = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
print(txt.decode('UTF-8'))
except subprocess.CalledProcessError as e:
print("Make failed with:\n %s" % e.output.decode('UTF-8'))
sys.exit(1)
if os.path.exists("lib%s.a" % lib):
print("Build was successful")
else:
sys.exit("Build of lib/%s/lib%s.a was NOT successful" % (lib, lib))
if has_extramake and not os.path.exists("Makefile.lammps"):
print("WARNING: lib/%s/Makefile.lammps was NOT created" % lib)
| akohlmey/lammps | lib/mesont/Install.py | Python | gpl-2.0 | 3,025 | 0.006942 |
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import errno
import stat
PEM_FILE = os.path.join(jupyter_data_dir(), 'notebook.pem')
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
# Set a certificate if USE_HTTPS is set to any value
if 'USE_HTTPS' in os.environ:
if not os.path.isfile(PEM_FILE):
# Ensure PEM_FILE directory exists
dir_name = os.path.dirname(PEM_FILE)
try:
os.makedirs(dir_name)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
pass
else: raise
# Generate a certificate if one doesn't exist on disk
subprocess.check_call(['openssl', 'req', '-new',
'-newkey', 'rsa:2048', '-days', '365', '-nodes', '-x509',
'-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
'-keyout', PEM_FILE, '-out', PEM_FILE])
# Restrict access to PEM_FILE
os.chmod(PEM_FILE, stat.S_IRUSR | stat.S_IWUSR)
c.NotebookApp.certfile = PEM_FILE
# Set a password if PASSWORD is set
if 'PASSWORD' in os.environ:
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
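# Example of what the PASSWORD branch stores (hash below is made up):
#   passwd('secret')  ->  'sha1:<salt>:<hexdigest>'
# i.e. c.NotebookApp.password receives the salted hash, never the plain text.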
| midvalestudent/jupyter | docker/base/jupyter_notebook_config.py | Python | mit | 1,414 | 0.004243 |
from braces.views import LoginRequiredMixin
from django.views.generic import UpdateView
from oauth2_provider.exceptions import OAuthToolkitError
from oauth2_provider.http import HttpResponseUriRedirect
from oauth2_provider.models import get_application_model as get_oauth2_application_model
from oauth2_provider.settings import oauth2_settings
from oauth2_provider.views import AuthorizationView
from oauth2_provider.views.application import ApplicationRegistration
from core.utils import get_default_scopes
from .forms import RegistrationForm
class ApplicationRegistrationView(ApplicationRegistration):
form_class = RegistrationForm
class ApplicationUpdateView(LoginRequiredMixin, UpdateView):
"""
View used to update an application owned by the request.user
"""
form_class = RegistrationForm
context_object_name = 'application'
template_name = "oauth2_provider/application_form.html"
def get_queryset(self):
return get_oauth2_application_model().objects.filter(user=self.request.user)
class CustomAuthorizationView(AuthorizationView):
def form_valid(self, form):
client_id = form.cleaned_data.get('client_id', '')
application = get_oauth2_application_model().objects.get(client_id=client_id)
scopes = form.cleaned_data.get('scope', '')
scopes = set(scopes.split(' '))
scopes.update(set(get_default_scopes(application)))
private_scopes = application.private_scopes
if private_scopes:
private_scopes = set(private_scopes.split(' '))
scopes.update(private_scopes)
scopes = ' '.join(list(scopes))
form.cleaned_data['scope'] = scopes
return super(CustomAuthorizationView, self).form_valid(form)
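    # Rough illustration of the scope merging above (all values hypothetical):
    # a submitted scope of "read write", default scopes {"basic"} and a
    # private_scopes field of "ldap" produce a space-joined scope string
    # containing read, write, basic and ldap (order not guaranteed, since a set
    # is used) before the parent class finishes processing the form.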
def get(self, request, *args, **kwargs):
"""
        Copied blatantly from the super method. Had to change a few things, but didn't find a better way
        than copying and editing the whole thing.
Sin Count += 1
"""
try:
scopes, credentials = self.validate_authorization_request(request)
try:
del credentials['request']
# Removing oauthlib.Request from credentials. This is not required in future
except KeyError: # pylint: disable=pointless-except
pass
kwargs['scopes_descriptions'] = [oauth2_settings.SCOPES[scope] for scope in scopes]
kwargs['scopes'] = scopes
# at this point we know an Application instance with such client_id exists in the database
application = get_oauth2_application_model().objects.get(
client_id=credentials['client_id']) # TODO: cache it!
kwargs['application'] = application
kwargs.update(credentials)
self.oauth2_data = kwargs
# following two loc are here only because of https://code.djangoproject.com/ticket/17795
form = self.get_form(self.get_form_class())
kwargs['form'] = form
# Check to see if the user has already granted access and return
# a successful response depending on 'approval_prompt' url parameter
require_approval = request.GET.get('approval_prompt', oauth2_settings.REQUEST_APPROVAL_PROMPT)
# If skip_authorization field is True, skip the authorization screen even
# if this is the first use of the application and there was no previous authorization.
            # This is useful for in-house applications: assume in-house applications
            # are already approved.
if application.skip_authorization:
uri, headers, body, status = self.create_authorization_response(
request=self.request, scopes=" ".join(scopes),
credentials=credentials, allow=True)
return HttpResponseUriRedirect(uri)
elif require_approval == 'auto':
tokens = request.user.accesstoken_set.filter(application=kwargs['application']).all().order_by('-id')
if len(tokens) > 0:
token = tokens[0]
if len(tokens) > 1:
# Enforce one token pair per user policy. Remove all older tokens
request.user.accesstoken_set.exclude(pk=token.id).all().delete()
# check past authorizations regarded the same scopes as the current one
if token.allow_scopes(scopes):
uri, headers, body, status = self.create_authorization_response(
request=self.request, scopes=" ".join(scopes),
credentials=credentials, allow=True)
return HttpResponseUriRedirect(uri)
return self.render_to_response(self.get_context_data(**kwargs))
except OAuthToolkitError as error:
return self.error_response(error)
| DheerendraRathor/ldap-oauth2 | application/views.py | Python | gpl-3.0 | 4,953 | 0.003836 |
# sqlalchemy/pool.py
# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Connection pooling for DB-API connections.
Provides a number of connection pool implementations for a variety of
usage scenarios and thread behavior requirements imposed by the
application, DB-API or database itself.
Also provides a DB-API 2.0 connection proxying mechanism allowing
regular DB-API connect() methods to be transparently managed by a
SQLAlchemy connection pool.
"""
import weakref, time, threading
from sqlalchemy import exc, log
from sqlalchemy import queue as sqla_queue
from sqlalchemy.util import threading, pickle, as_interface, memoized_property
proxies = {}
def manage(module, **params):
"""Return a proxy for a DB-API module that automatically
pools connections.
Given a DB-API 2.0 module and pool management parameters, returns
a proxy for the module that will automatically pool connections,
creating new connection pools for each distinct set of connection
arguments sent to the decorated module's connect() function.
:param module: a DB-API 2.0 database module
:param poolclass: the class used by the pool module to provide
pooling. Defaults to :class:`QueuePool`.
:param \*\*params: will be passed through to *poolclass*
"""
try:
return proxies[module]
except KeyError:
return proxies.setdefault(module, _DBProxy(module, **params))
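# A minimal usage sketch for manage() (module choice and database name are just examples):
#
#   import sqlite3
#   from sqlalchemy import pool
#
#   sqlite = pool.manage(sqlite3)        # proxy for the sqlite3 module
#   conn = sqlite.connect('some.db')     # checked out from an implicitly created pool
#   conn.close()                         # returned to the pool, not actually closed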
def clear_managers():
"""Remove all current DB-API 2.0 managers.
All pools and connections are disposed.
"""
for manager in proxies.itervalues():
manager.close()
proxies.clear()
class Pool(log.Identified):
"""Abstract base class for connection pools."""
def __init__(self,
creator, recycle=-1, echo=None,
use_threadlocal=False,
logging_name=None,
reset_on_return=True, listeners=None):
"""
Construct a Pool.
:param creator: a callable function that returns a DB-API
connection object. The function will be called with
parameters.
:param recycle: If set to non -1, number of seconds between
connection recycling, which means upon checkout, if this
timeout is surpassed the connection will be closed and
replaced with a newly opened connection. Defaults to -1.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param echo: If True, connections being pulled and retrieved
from the pool will be logged to the standard output, as well
as pool sizing information. Echoing can also be achieved by
enabling logging for the "sqlalchemy.pool"
namespace. Defaults to False.
:param use_threadlocal: If set to True, repeated calls to
:meth:`connect` within the same application thread will be
guaranteed to return the same connection object, if one has
already been retrieved from the pool and has not been
returned yet. Offers a slight performance advantage at the
cost of individual transactions by default. The
:meth:`unique_connection` method is provided to bypass the
threadlocal behavior installed into :meth:`connect`.
:param reset_on_return: If true, reset the database state of
connections returned to the pool. This is typically a
ROLLBACK to release locks and transaction resources.
Disable at your own peril. Defaults to True.
:param listeners: A list of
:class:`~sqlalchemy.interfaces.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
connections are created, checked out and checked in to the
pool.
"""
if logging_name:
self.logging_name = self._orig_logging_name = logging_name
else:
self._orig_logging_name = None
self.logger = log.instance_logger(self, echoflag=echo)
self._threadconns = threading.local()
self._creator = creator
self._recycle = recycle
self._use_threadlocal = use_threadlocal
self._reset_on_return = reset_on_return
self.echo = echo
self.listeners = []
self._on_connect = []
self._on_first_connect = []
self._on_checkout = []
self._on_checkin = []
if listeners:
for l in listeners:
self.add_listener(l)
def unique_connection(self):
"""Produce a DBAPI connection that is not referenced by any
thread-local context.
This method is different from :meth:`.Pool.connect` only if the
``use_threadlocal`` flag has been set to ``True``.
"""
return _ConnectionFairy(self).checkout()
def create_connection(self):
"""Called by subclasses to create a new ConnectionRecord."""
return _ConnectionRecord(self)
def recreate(self):
"""Return a new :class:`.Pool`, of the same class as this one
and configured with identical creation arguments.
        This method is used in conjunction with :meth:`dispose`
to close out an entire :class:`.Pool` and create a new one in
its place.
"""
raise NotImplementedError()
def dispose(self):
"""Dispose of this pool.
This method leaves the possibility of checked-out connections
        remaining open. It is advised to not reuse the pool once dispose()
is called, and to instead use a new pool constructed by the
recreate() method.
"""
raise NotImplementedError()
def connect(self):
"""Return a DBAPI connection from the pool.
The connection is instrumented such that when its
``close()`` method is called, the connection will be returned to
the pool.
"""
if not self._use_threadlocal:
return _ConnectionFairy(self).checkout()
try:
rec = self._threadconns.current()
if rec:
return rec.checkout()
except AttributeError:
pass
agent = _ConnectionFairy(self)
self._threadconns.current = weakref.ref(agent)
return agent.checkout()
def return_conn(self, record):
"""Given a _ConnectionRecord, return it to the :class:`.Pool`.
This method is called when an instrumented DBAPI connection
has its ``close()`` method called.
"""
if self._use_threadlocal and hasattr(self._threadconns, "current"):
del self._threadconns.current
self.do_return_conn(record)
def get(self):
"""Return a non-instrumented DBAPI connection from this :class:`.Pool`.
This is called by ConnectionRecord in order to get its DBAPI
resource.
"""
return self.do_get()
def do_get(self):
"""Implementation for :meth:`get`, supplied by subclasses."""
raise NotImplementedError()
def do_return_conn(self, conn):
"""Implementation for :meth:`return_conn`, supplied by subclasses."""
raise NotImplementedError()
def status(self):
raise NotImplementedError()
def add_listener(self, listener):
"""Add a ``PoolListener``-like object to this pool.
``listener`` may be an object that implements some or all of
PoolListener, or a dictionary of callables containing implementations
of some or all of the named methods in PoolListener.
"""
listener = as_interface(listener,
methods=('connect', 'first_connect', 'checkout', 'checkin'))
self.listeners.append(listener)
if hasattr(listener, 'connect'):
self._on_connect.append(listener)
if hasattr(listener, 'first_connect'):
self._on_first_connect.append(listener)
if hasattr(listener, 'checkout'):
self._on_checkout.append(listener)
if hasattr(listener, 'checkin'):
self._on_checkin.append(listener)
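    # Sketch of a dictionary-style listener (names are illustrative):
    #
    #   def ping_on_checkout(dbapi_con, con_record, con_proxy):
    #       pass   # e.g. run a cheap "SELECT 1" here to validate the connection
    #
    #   some_pool.add_listener({'checkout': ping_on_checkout})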
class _ConnectionRecord(object):
def __init__(self, pool):
self.__pool = pool
self.connection = self.__connect()
self.info = {}
ls = pool.__dict__.pop('_on_first_connect', None)
if ls is not None:
for l in ls:
l.first_connect(self.connection, self)
if pool._on_connect:
for l in pool._on_connect:
l.connect(self.connection, self)
def close(self):
if self.connection is not None:
self.__pool.logger.debug("Closing connection %r", self.connection)
try:
self.connection.close()
except (SystemExit, KeyboardInterrupt):
raise
except:
self.__pool.logger.debug("Exception closing connection %r",
self.connection)
def invalidate(self, e=None):
if e is not None:
self.__pool.logger.info(
"Invalidate connection %r (reason: %s:%s)",
self.connection, e.__class__.__name__, e)
else:
self.__pool.logger.info(
"Invalidate connection %r", self.connection)
self.__close()
self.connection = None
def get_connection(self):
if self.connection is None:
self.connection = self.__connect()
self.info.clear()
if self.__pool._on_connect:
for l in self.__pool._on_connect:
l.connect(self.connection, self)
elif self.__pool._recycle > -1 and \
time.time() - self.starttime > self.__pool._recycle:
self.__pool.logger.info(
"Connection %r exceeded timeout; recycling",
self.connection)
self.__close()
self.connection = self.__connect()
self.info.clear()
if self.__pool._on_connect:
for l in self.__pool._on_connect:
l.connect(self.connection, self)
return self.connection
def __close(self):
try:
self.__pool.logger.debug("Closing connection %r", self.connection)
self.connection.close()
except (SystemExit, KeyboardInterrupt):
raise
except Exception, e:
self.__pool.logger.debug(
"Connection %r threw an error on close: %s",
self.connection, e)
def __connect(self):
try:
self.starttime = time.time()
connection = self.__pool._creator()
self.__pool.logger.debug("Created new connection %r", connection)
return connection
except Exception, e:
self.__pool.logger.debug("Error on connect(): %s", e)
raise
def _finalize_fairy(connection, connection_record, pool, ref=None):
_refs.discard(connection_record)
if ref is not None and \
(connection_record.fairy is not ref or
isinstance(pool, AssertionPool)):
return
if connection is not None:
try:
if pool._reset_on_return:
connection.rollback()
# Immediately close detached instances
if connection_record is None:
connection.close()
except Exception, e:
if connection_record is not None:
connection_record.invalidate(e=e)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
if connection_record is not None:
connection_record.fairy = None
pool.logger.debug("Connection %r being returned to pool", connection)
if pool._on_checkin:
for l in pool._on_checkin:
l.checkin(connection, connection_record)
pool.return_conn(connection_record)
_refs = set()
class _ConnectionFairy(object):
"""Proxies a DB-API connection and provides return-on-dereference
support."""
__slots__ = '_pool', '__counter', 'connection', \
'_connection_record', '__weakref__', '_detached_info'
def __init__(self, pool):
self._pool = pool
self.__counter = 0
try:
rec = self._connection_record = pool.get()
conn = self.connection = self._connection_record.get_connection()
rec.fairy = weakref.ref(
self,
lambda ref:_finalize_fairy(conn, rec, pool, ref)
)
_refs.add(rec)
except:
# helps with endless __getattr__ loops later on
self.connection = None
self._connection_record = None
raise
self._pool.logger.debug("Connection %r checked out from pool" %
self.connection)
@property
def _logger(self):
return self._pool.logger
@property
def is_valid(self):
return self.connection is not None
@property
def info(self):
"""An info collection unique to this DB-API connection."""
try:
return self._connection_record.info
except AttributeError:
if self.connection is None:
raise exc.InvalidRequestError("This connection is closed")
try:
return self._detached_info
except AttributeError:
self._detached_info = value = {}
return value
def invalidate(self, e=None):
"""Mark this connection as invalidated.
The connection will be immediately closed. The containing
ConnectionRecord will create a new connection when next used.
"""
if self.connection is None:
raise exc.InvalidRequestError("This connection is closed")
if self._connection_record is not None:
self._connection_record.invalidate(e=e)
self.connection = None
self._close()
def cursor(self, *args, **kwargs):
try:
c = self.connection.cursor(*args, **kwargs)
return _CursorFairy(self, c)
except Exception, e:
self.invalidate(e=e)
raise
def __getattr__(self, key):
return getattr(self.connection, key)
def checkout(self):
if self.connection is None:
raise exc.InvalidRequestError("This connection is closed")
self.__counter += 1
if not self._pool._on_checkout or self.__counter != 1:
return self
# Pool listeners can trigger a reconnection on checkout
attempts = 2
while attempts > 0:
try:
for l in self._pool._on_checkout:
l.checkout(self.connection, self._connection_record, self)
return self
except exc.DisconnectionError, e:
self._pool.logger.info(
"Disconnection detected on checkout: %s", e)
self._connection_record.invalidate(e)
self.connection = self._connection_record.get_connection()
attempts -= 1
self._pool.logger.info("Reconnection attempts exhausted on checkout")
self.invalidate()
raise exc.InvalidRequestError("This connection is closed")
def detach(self):
"""Separate this connection from its Pool.
This means that the connection will no longer be returned to the
pool when closed, and will instead be literally closed. The
containing ConnectionRecord is separated from the DB-API connection,
and will create a new connection when next used.
Note that any overall connection limiting constraints imposed by a
Pool implementation may be violated after a detach, as the detached
connection is removed from the pool's knowledge and control.
"""
if self._connection_record is not None:
_refs.remove(self._connection_record)
self._connection_record.fairy = None
self._connection_record.connection = None
self._pool.do_return_conn(self._connection_record)
self._detached_info = \
self._connection_record.info.copy()
self._connection_record = None
def close(self):
self.__counter -= 1
if self.__counter == 0:
self._close()
def _close(self):
_finalize_fairy(self.connection, self._connection_record, self._pool)
self.connection = None
self._connection_record = None
class _CursorFairy(object):
__slots__ = '_parent', 'cursor', 'execute'
def __init__(self, parent, cursor):
self._parent = parent
self.cursor = cursor
self.execute = cursor.execute
def invalidate(self, e=None):
self._parent.invalidate(e=e)
def __iter__(self):
return iter(self.cursor)
def close(self):
try:
self.cursor.close()
except Exception, e:
try:
ex_text = str(e)
except TypeError:
ex_text = repr(e)
self._parent._logger.warn("Error closing cursor: %s", ex_text)
if isinstance(e, (SystemExit, KeyboardInterrupt)):
raise
def __setattr__(self, key, value):
if key in self.__slots__:
object.__setattr__(self, key, value)
else:
setattr(self.cursor, key, value)
def __getattr__(self, key):
return getattr(self.cursor, key)
class SingletonThreadPool(Pool):
"""A Pool that maintains one connection per thread.
Maintains one connection per each thread, never moving a connection to a
thread other than the one which it was created in.
This is used for SQLite, which both does not handle multithreading by
default, and also requires a singleton connection if a :memory: database
is being used.
Options are the same as those of :class:`Pool`, as well as:
:param pool_size: The number of threads in which to maintain connections
at once. Defaults to five.
"""
def __init__(self, creator, pool_size=5, **kw):
kw['use_threadlocal'] = True
Pool.__init__(self, creator, **kw)
self._conn = threading.local()
self._all_conns = set()
self.size = pool_size
def recreate(self):
self.logger.info("Pool recreating")
return SingletonThreadPool(self._creator,
pool_size=self.size,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
listeners=self.listeners)
def dispose(self):
"""Dispose of this pool."""
for conn in self._all_conns:
try:
conn.close()
except (SystemExit, KeyboardInterrupt):
raise
except:
# pysqlite won't even let you close a conn from a thread
# that didn't create it
pass
self._all_conns.clear()
def dispose_local(self):
if hasattr(self._conn, 'current'):
conn = self._conn.current()
self._all_conns.discard(conn)
del self._conn.current
def cleanup(self):
while len(self._all_conns) > self.size:
c = self._all_conns.pop()
c.close()
def status(self):
return "SingletonThreadPool id:%d size: %d" % \
(id(self), len(self._all_conns))
def do_return_conn(self, conn):
pass
def do_get(self):
try:
c = self._conn.current()
if c:
return c
except AttributeError:
pass
c = self.create_connection()
self._conn.current = weakref.ref(c)
self._all_conns.add(c)
if len(self._all_conns) > self.size:
self.cleanup()
return c
class QueuePool(Pool):
"""A Pool that imposes a limit on the number of open connections."""
def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30,
**kw):
"""
Construct a QueuePool.
:param creator: a callable function that returns a DB-API
connection object. The function will be called with
parameters.
:param pool_size: The size of the pool to be maintained,
defaults to 5. This is the largest number of connections that
will be kept persistently in the pool. Note that the pool
begins with no connections; once this number of connections
is requested, that number of connections will remain.
``pool_size`` can be set to 0 to indicate no size limit; to
disable pooling, use a :class:`~sqlalchemy.pool.NullPool`
instead.
:param max_overflow: The maximum overflow size of the
pool. When the number of checked-out connections reaches the
size set in pool_size, additional connections will be
returned up to this limit. When those additional connections
are returned to the pool, they are disconnected and
discarded. It follows then that the total number of
simultaneous connections the pool will allow is pool_size +
`max_overflow`, and the total number of "sleeping"
connections the pool will allow is pool_size. `max_overflow`
can be set to -1 to indicate no overflow limit; no limit
will be placed on the total number of concurrent
connections. Defaults to 10.
:param timeout: The number of seconds to wait before giving up
on returning a connection. Defaults to 30.
:param recycle: If set to non -1, number of seconds between
connection recycling, which means upon checkout, if this
timeout is surpassed the connection will be closed and
replaced with a newly opened connection. Defaults to -1.
:param echo: If True, connections being pulled and retrieved
from the pool will be logged to the standard output, as well
as pool sizing information. Echoing can also be achieved by
enabling logging for the "sqlalchemy.pool"
namespace. Defaults to False.
:param use_threadlocal: If set to True, repeated calls to
:meth:`connect` within the same application thread will be
guaranteed to return the same connection object, if one has
already been retrieved from the pool and has not been
returned yet. Offers a slight performance advantage at the
cost of individual transactions by default. The
:meth:`unique_connection` method is provided to bypass the
threadlocal behavior installed into :meth:`connect`.
:param reset_on_return: If true, reset the database state of
connections returned to the pool. This is typically a
ROLLBACK to release locks and transaction resources.
Disable at your own peril. Defaults to True.
:param listeners: A list of
:class:`~sqlalchemy.interfaces.PoolListener`-like objects or
dictionaries of callables that receive events when DB-API
connections are created, checked out and checked in to the
pool.
"""
Pool.__init__(self, creator, **kw)
self._pool = sqla_queue.Queue(pool_size)
self._overflow = 0 - pool_size
self._max_overflow = max_overflow
self._timeout = timeout
self._overflow_lock = self._max_overflow > -1 and \
threading.Lock() or None
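    # Construction sketch (driver, connection arguments and sizes are placeholders):
    #
    #   import psycopg2
    #   from sqlalchemy.pool import QueuePool
    #
    #   def getconn():
    #       return psycopg2.connect(host='localhost', dbname='test')
    #
    #   p = QueuePool(getconn, pool_size=5, max_overflow=10, timeout=30)
    #   conn = p.connect()    # may wait up to `timeout` seconds once the limit is reached
    #   conn.close()          # checks the connection back into the pool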
def recreate(self):
self.logger.info("Pool recreating")
return QueuePool(self._creator, pool_size=self._pool.maxsize,
max_overflow=self._max_overflow,
timeout=self._timeout,
recycle=self._recycle, echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
listeners=self.listeners)
def do_return_conn(self, conn):
try:
self._pool.put(conn, False)
except sqla_queue.Full:
conn.close()
if self._overflow_lock is None:
self._overflow -= 1
else:
self._overflow_lock.acquire()
try:
self._overflow -= 1
finally:
self._overflow_lock.release()
def do_get(self):
try:
wait = self._max_overflow > -1 and \
self._overflow >= self._max_overflow
return self._pool.get(wait, self._timeout)
except sqla_queue.Empty:
if self._max_overflow > -1 and \
self._overflow >= self._max_overflow:
if not wait:
return self.do_get()
else:
raise exc.TimeoutError(
"QueuePool limit of size %d overflow %d reached, "
"connection timed out, timeout %d" %
(self.size(), self.overflow(), self._timeout))
if self._overflow_lock is not None:
self._overflow_lock.acquire()
if self._max_overflow > -1 and \
self._overflow >= self._max_overflow:
if self._overflow_lock is not None:
self._overflow_lock.release()
return self.do_get()
try:
con = self.create_connection()
self._overflow += 1
finally:
if self._overflow_lock is not None:
self._overflow_lock.release()
return con
def dispose(self):
while True:
try:
conn = self._pool.get(False)
conn.close()
except sqla_queue.Empty:
break
self._overflow = 0 - self.size()
self.logger.info("Pool disposed. %s", self.status())
def status(self):
return "Pool size: %d Connections in pool: %d "\
"Current Overflow: %d Current Checked out "\
"connections: %d" % (self.size(),
self.checkedin(),
self.overflow(),
self.checkedout())
def size(self):
return self._pool.maxsize
def checkedin(self):
return self._pool.qsize()
def overflow(self):
return self._overflow
def checkedout(self):
return self._pool.maxsize - self._pool.qsize() + self._overflow
class NullPool(Pool):
"""A Pool which does not pool connections.
Instead it literally opens and closes the underlying DB-API connection
per each connection open/close.
Reconnect-related functions such as ``recycle`` and connection
invalidation are not supported by this Pool implementation, since
no connections are held persistently.
"""
def status(self):
return "NullPool"
def do_return_conn(self, conn):
conn.close()
def do_return_invalid(self, conn):
pass
def do_get(self):
return self.create_connection()
def recreate(self):
self.logger.info("Pool recreating")
return NullPool(self._creator,
recycle=self._recycle,
echo=self.echo,
logging_name=self._orig_logging_name,
use_threadlocal=self._use_threadlocal,
listeners=self.listeners)
def dispose(self):
pass
class StaticPool(Pool):
"""A Pool of exactly one connection, used for all requests.
Reconnect-related functions such as ``recycle`` and connection
invalidation (which is also used to support auto-reconnect) are not
currently supported by this Pool implementation but may be implemented
in a future release.
"""
@memoized_property
def _conn(self):
return self._creator()
@memoized_property
def connection(self):
return _ConnectionRecord(self)
def status(self):
return "StaticPool"
def dispose(self):
if '_conn' in self.__dict__:
self._conn.close()
self._conn = None
def recreate(self):
self.logger.info("Pool recreating")
return self.__class__(creator=self._creator,
recycle=self._recycle,
use_threadlocal=self._use_threadlocal,
reset_on_return=self._reset_on_return,
echo=self.echo,
logging_name=self._orig_logging_name,
listeners=self.listeners)
def create_connection(self):
return self._conn
def do_return_conn(self, conn):
pass
def do_return_invalid(self, conn):
pass
def do_get(self):
return self.connection
class AssertionPool(Pool):
"""A Pool that allows at most one checked out connection at any given
time.
This will raise an exception if more than one connection is checked out
at a time. Useful for debugging code that is using more connections
than desired.
"""
def __init__(self, *args, **kw):
self._conn = None
self._checked_out = False
Pool.__init__(self, *args, **kw)
def status(self):
return "AssertionPool"
def do_return_conn(self, conn):
if not self._checked_out:
raise AssertionError("connection is not checked out")
self._checked_out = False
assert conn is self._conn
def do_return_invalid(self, conn):
self._conn = None
self._checked_out = False
def dispose(self):
self._checked_out = False
if self._conn:
self._conn.close()
def recreate(self):
self.logger.info("Pool recreating")
return AssertionPool(self._creator, echo=self.echo,
logging_name=self._orig_logging_name,
listeners=self.listeners)
def do_get(self):
if self._checked_out:
raise AssertionError("connection is already checked out")
if not self._conn:
self._conn = self.create_connection()
self._checked_out = True
return self._conn
class _DBProxy(object):
"""Layers connection pooling behavior on top of a standard DB-API module.
Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
specific connect parameters. Other functions and attributes are delegated
to the underlying DB-API module.
"""
def __init__(self, module, poolclass=QueuePool, **kw):
"""Initializes a new proxy.
module
a DB-API 2.0 module
poolclass
a Pool class, defaulting to QueuePool
Other parameters are sent to the Pool object's constructor.
"""
self.module = module
self.kw = kw
self.poolclass = poolclass
self.pools = {}
self._create_pool_mutex = threading.Lock()
def close(self):
for key in self.pools.keys():
del self.pools[key]
def __del__(self):
self.close()
def __getattr__(self, key):
return getattr(self.module, key)
def get_pool(self, *args, **kw):
key = self._serialize(*args, **kw)
try:
return self.pools[key]
except KeyError:
self._create_pool_mutex.acquire()
try:
if key not in self.pools:
pool = self.poolclass(lambda:
self.module.connect(*args, **kw), **self.kw)
self.pools[key] = pool
return pool
else:
return self.pools[key]
finally:
self._create_pool_mutex.release()
def connect(self, *args, **kw):
"""Activate a connection to the database.
Connect to the database using this DBProxy's module and the given
connect arguments. If the arguments match an existing pool, the
connection will be returned from the pool's current thread-local
connection instance, or if there is no thread-local connection
instance it will be checked out from the set of pooled connections.
If the pool has no available connections and allows new connections
to be created, a new database connection will be made.
"""
return self.get_pool(*args, **kw).connect()
def dispose(self, *args, **kw):
"""Dispose the pool referenced by the given connect arguments."""
key = self._serialize(*args, **kw)
try:
del self.pools[key]
except KeyError:
pass
def _serialize(self, *args, **kw):
return pickle.dumps([args, kw])
| jokajak/itweb | data/env/lib/python2.6/site-packages/SQLAlchemy-0.6.7-py2.6.egg/sqlalchemy/pool.py | Python | gpl-3.0 | 33,545 | 0.002832 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals, division
from sc2reader.utils import Length
from sc2reader.events.base import Event
from sc2reader.log_utils import loggable
from itertools import chain
@loggable
class GameEvent(Event):
"""
This is the base class for all game events. The attributes below are universally available.
"""
def __init__(self, frame, pid):
#: The id of the player generating the event. This is 16 for global non-player events.
#: Prior to Heart of the Swarm this was the player id. Since HotS it is
#: now the user id (uid), we still call it pid for backwards compatibility. You shouldn't
#: ever need to use this; use :attr:`player` instead.
self.pid = pid
#: A reference to the :class:`~sc2reader.objects.Player` object representing
#: this player in the replay. Not available for global events (:attr:`is_local` = False)
self.player = None
#: The frame of the game that this event was recorded at. 16 frames per game second.
self.frame = frame
#: The second of the game that this event was recorded at. 16 frames per game second.
self.second = frame >> 4
#: A flag indicating if it is a local or global event.
self.is_local = pid != 16
#: Short cut string for event class name
self.name = self.__class__.__name__
def _str_prefix(self):
if getattr(self, "pid", 16) == 16:
player_name = "Global"
elif self.player and not self.player.name:
player_name = "Player {0} - ({1})".format(
self.player.pid, self.player.play_race
)
elif self.player:
player_name = self.player.name
else:
player_name = "no name"
return "{0}\t{1:<15} ".format(Length(seconds=int(self.frame / 16)), player_name)
def __str__(self):
return self._str_prefix() + self.name
class GameStartEvent(GameEvent):
"""
Recorded when the game starts and the frames start to roll. This is a global non-player
event.
"""
def __init__(self, frame, pid, data):
super(GameStartEvent, self).__init__(frame, pid)
#: ???
self.data = data
class PlayerLeaveEvent(GameEvent):
"""
Recorded when a player leaves the game.
"""
def __init__(self, frame, pid, data):
super(PlayerLeaveEvent, self).__init__(frame, pid)
#: ???
self.data = data
class UserOptionsEvent(GameEvent):
"""
This event is recorded for each player at the very beginning of the game before the
:class:`GameStartEvent`.
"""
def __init__(self, frame, pid, data):
super(UserOptionsEvent, self).__init__(frame, pid)
#:
self.game_fully_downloaded = data["game_fully_downloaded"]
#:
self.development_cheats_enabled = data["development_cheats_enabled"]
#:
self.multiplayer_cheats_enabled = data["multiplayer_cheats_enabled"]
#:
self.sync_checksumming_enabled = data["sync_checksumming_enabled"]
#:
self.is_map_to_map_transition = data["is_map_to_map_transition"]
#:
self.use_ai_beacons = data["use_ai_beacons"]
#: Are workers sent to auto-mine on game start
self.starting_rally = (
data["starting_rally"] if "starting_rally" in data else None
)
#:
self.debug_pause_enabled = data["debug_pause_enabled"]
#:
self.base_build_num = data["base_build_num"]
def create_command_event(frame, pid, data):
ability_type = data["data"][0]
if ability_type == "None":
return BasicCommandEvent(frame, pid, data)
elif ability_type == "TargetUnit":
return TargetUnitCommandEvent(frame, pid, data)
elif ability_type == "TargetPoint":
return TargetPointCommandEvent(frame, pid, data)
elif ability_type == "Data":
return DataCommandEvent(frame, pid, data)
@loggable
class CommandEvent(GameEvent):
"""
Ability events are generated when ever a player in the game issues a command
to a unit or group of units. They are split into three subclasses of ability,
each with their own set of associated data. The attributes listed below are
shared across all ability event types.
See :class:`TargetPointCommandEvent`, :class:`TargetUnitCommandEvent`, and
:class:`DataCommandEvent` for individual details.
"""
def __init__(self, frame, pid, data):
super(CommandEvent, self).__init__(frame, pid)
#: Flags on the command???
self.flags = data["flags"]
#: A dictionary of possible ability flags. Flags are:
#:
#: * alternate
#: * queued
#: * preempt
#: * smart_click
#: * smart_rally
#: * subgroup
#: * set_autocast,
#: * set_autocast_on
#: * user
#: * data_a
#: * data_b
#: * data_passenger
#: * data_abil_queue_order_id,
#: * ai
#: * ai_ignore_on_finish
#: * is_order
#: * script
#: * homogenous_interruption,
#: * minimap
#: * repeat
#: * dispatch_to_other_unit
#: * target_self
#:
self.flag = dict(
alternate=0x1 & self.flags != 0,
queued=0x2 & self.flags != 0,
preempt=0x4 & self.flags != 0,
smart_click=0x8 & self.flags != 0,
smart_rally=0x10 & self.flags != 0,
subgroup=0x20 & self.flags != 0,
set_autocast=0x40 & self.flags != 0,
set_autocast_on=0x80 & self.flags != 0,
user=0x100 & self.flags != 0,
data_a=0x200 & self.flags != 0,
data_passenger=0x200 & self.flags != 0, # alt-name
data_b=0x400 & self.flags != 0,
data_abil_queue_order_id=0x400 & self.flags != 0, # alt-name
ai=0x800 & self.flags != 0,
ai_ignore_on_finish=0x1000 & self.flags != 0,
is_order=0x2000 & self.flags != 0,
script=0x4000 & self.flags != 0,
homogenous_interruption=0x8000 & self.flags != 0,
minimap=0x10000 & self.flags != 0,
repeat=0x20000 & self.flags != 0,
dispatch_to_other_unit=0x40000 & self.flags != 0,
target_self=0x80000 & self.flags != 0,
)
#: Flag marking that the command had ability information
self.has_ability = data["ability"] is not None
#: Link the the ability group
self.ability_link = data["ability"]["ability_link"] if self.has_ability else 0
#: The index of the ability in the ability group
self.command_index = (
data["ability"]["ability_command_index"] if self.has_ability else 0
)
#: Additional ability data.
self.ability_data = (
data["ability"]["ability_command_data"] if self.has_ability else 0
)
#: Unique identifier for the ability
self.ability_id = self.ability_link << 5 | self.command_index
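        # For example (made-up values): ability_link 0x3E with command_index 1
        # gives ability_id (0x3E << 5) | 1 == 0x7C1.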
#: A reference to the ability being used
self.ability = None
#: A shortcut to the name of the ability being used
self.ability_name = ""
#: The type of ability, one of: None (no target), TargetPoint, TargetUnit, or Data
self.ability_type = data["data"][0]
#: The raw data associated with this ability type
self.ability_type_data = data["data"][1]
#: Other unit id??
self.other_unit_id = data["other_unit_tag"]
#: A reference to the other unit
self.other_unit = None
def __str__(self):
string = self._str_prefix()
if self.has_ability:
string += "Ability ({0:X})".format(self.ability_id)
if self.ability:
string += " - {0}".format(self.ability.name)
else:
string += "Right Click"
if self.ability_type == "TargetUnit":
string += "; Target: {0} [{1:0>8X}]".format(
self.target.name, self.target_unit_id
)
if self.ability_type in ("TargetPoint", "TargetUnit"):
string += "; Location: {0}".format(str(self.location))
return string
class BasicCommandEvent(CommandEvent):
"""
Extends :class:`CommandEvent`
This event is recorded for events that have no extra information recorded.
Note that like all CommandEvents, the event will be recorded regardless
of whether or not the command was successful.
"""
def __init__(self, frame, pid, data):
super(BasicCommandEvent, self).__init__(frame, pid, data)
class TargetPointCommandEvent(CommandEvent):
"""
Extends :class:`CommandEvent`
This event is recorded when ever a player issues a command that targets a location
and NOT a unit. Commands like Psistorm, Attack Move, Fungal Growth, and EMP fall
under this category.
Note that like all CommandEvents, the event will be recorded regardless
of whether or not the command was successful.
"""
def __init__(self, frame, pid, data):
super(TargetPointCommandEvent, self).__init__(frame, pid, data)
#: The x coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.x = self.ability_type_data["point"].get("x", 0) / 4096.0
#: The y coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.y = self.ability_type_data["point"].get("y", 0) / 4096.0
#: The z coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.z = self.ability_type_data["point"].get("z", 0)
#: The location of the target. Available for TargetPoint and TargetUnit type events
self.location = (self.x, self.y, self.z)
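        # Sketch of the fixed-point decoding above (hypothetical raw values):
        # a stored point of {"x": 81920, "y": 40960, "z": 12} decodes to
        # x == 20.0 and y == 10.0 because both axes carry 12 fractional bits
        # (hence the divide by 4096.0); z is passed through unchanged.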
class TargetUnitCommandEvent(CommandEvent):
"""
Extends :class:`CommandEvent`
    This event is recorded whenever a player issues a command that targets a unit.
The location of the target unit at the time of the command is also recorded. Commands like
Chronoboost, Transfuse, and Snipe fall under this category.
Note that like all CommandEvents, the event will be recorded regardless
of whether or not the command was successful.
"""
def __init__(self, frame, pid, data):
super(TargetUnitCommandEvent, self).__init__(frame, pid, data)
#: Flags set on the target unit. Available for TargetUnit type events
self.target_flags = self.ability_type_data.get("flags", None)
#: Timer?? Available for TargetUnit type events.
self.target_timer = self.ability_type_data.get("timer", None)
#: Unique id of the target unit. Available for TargetUnit type events.
#: This id can be 0 when the target unit is shrouded by fog of war.
self.target_unit_id = self.ability_type_data.get("unit_tag", None)
#: A reference to the targeted unit. When the :attr:`target_unit_id` is
#: 0 this target unit is a generic, reused fog of war unit of the :attr:`target_unit_type`
#: with an id of zero. It should not be confused with a real unit.
self.target_unit = None
#: Current integer type id of the target unit. Available for TargetUnit type events.
self.target_unit_type = self.ability_type_data.get("unit_link", None)
#: Integer player id of the controlling player. Available for TargetUnit type events starting in 19595.
#: When the targeted unit is under fog of war this id is zero.
self.control_player_id = self.ability_type_data.get("control_player_id", None)
#: Integer player id of the player paying upkeep. Available for TargetUnit type events.
self.upkeep_player_id = self.ability_type_data.get("upkeep_player_id", None)
#: The x coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.x = self.ability_type_data["point"].get("x", 0) / 4096.0
#: The y coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.y = self.ability_type_data["point"].get("y", 0) / 4096.0
#: The z coordinate of the target. Available for TargetPoint and TargetUnit type events.
self.z = self.ability_type_data["point"].get("z", 0)
#: The location of the target. Available for TargetPoint and TargetUnit type events
self.location = (self.x, self.y, self.z)
class UpdateTargetPointCommandEvent(TargetPointCommandEvent):
"""
    Extends :class:`TargetPointCommandEvent`
    This event is generated when the user changes the target point of a unit. Appears to happen
when a unit is moving and it is given a new command. It's possible there are other
instances of this occurring.
"""
name = "UpdateTargetPointCommandEvent"
class UpdateTargetUnitCommandEvent(TargetUnitCommandEvent):
"""
Extends :class:`TargetUnitCommandEvent`
This event is generated when a TargetUnitCommandEvent is updated, likely due to
changing the target unit. It is unclear if this needs to be a separate event
from TargetUnitCommandEvent, but for flexibility, it will be treated
differently.
One example of this event occurring is casting inject on a hatchery while
holding shift, and then shift clicking on a second hatchery.
"""
name = "UpdateTargetUnitCommandEvent"
class DataCommandEvent(CommandEvent):
"""
Extends :class:`CommandEvent`
    DataCommandEvents are recorded whenever a player issues a command that has no target. Commands
    like Burrow, SiegeMode, Train XYZ, and Stop fall under this category.
Note that like all CommandEvents, the event will be recorded regardless
of whether or not the command was successful.
"""
def __init__(self, frame, pid, data):
super(DataCommandEvent, self).__init__(frame, pid, data)
#: Other target data. Available for Data type events.
self.target_data = self.ability_type_data.get("data", None)
@loggable
class CommandManagerStateEvent(GameEvent):
"""
    These events indicate that the last :class:`CommandEvent` has been
    repeated. For example, if you add three SCVs to an empty queue on a
    Command Center, the first add will generate a :class:`BasicCommandEvent`
and the two subsequent adds will each generate a
:class:`CommandManagerStateEvent`.
"""
def __init__(self, frame, pid, data):
super(CommandManagerStateEvent, self).__init__(frame, pid)
#: Always 1?
self.state = data["state"]
#: An index identifying how many events of this type have been called
self.sequence = data["sequence"]
@loggable
class SelectionEvent(GameEvent):
"""
    Selection events are generated whenever the active selection of the
player is updated. Unlike other game events, these events can also be
generated by non-player actions like unit deaths or transformations.
Starting in Starcraft 2.0.0, selection events targeting control group
buffers are also generated when control group selections are modified
by non-player actions. When a player action updates a control group
a :class:`ControlGroupEvent` is generated.
"""
def __init__(self, frame, pid, data):
super(SelectionEvent, self).__init__(frame, pid)
#: The control group being modified. 10 for active selection
self.control_group = data["control_group_index"]
#: Deprecated, use control_group
self.bank = self.control_group
#: ???
self.subgroup_index = data["subgroup_index"]
#: The type of mask to apply. One of None, Mask, OneIndices, ZeroIndices
self.mask_type = data["remove_mask"][0]
#: The data for the mask
self.mask_data = data["remove_mask"][1]
#: The unit type data for the new units
self.new_unit_types = [
(
d["unit_link"],
d["subgroup_priority"],
d["intra_subgroup_priority"],
d["count"],
)
for d in data["add_subgroups"]
]
#: The unit id data for the new units
self.new_unit_ids = data["add_unit_tags"]
# This stretches out the unit types and priorities to be zipped with ids.
unit_types = chain(
*[
[utype] * count
for (
utype,
subgroup_priority,
intra_subgroup_priority,
count,
) in self.new_unit_types
]
)
unit_subgroup_priorities = chain(
*[
[subgroup_priority] * count
for (
utype,
subgroup_priority,
intra_subgroup_priority,
count,
) in self.new_unit_types
]
)
unit_intra_subgroup_priorities = chain(
*[
[intra_subgroup_priority] * count
for (
utype,
subgroup_priority,
intra_subgroup_priority,
count,
) in self.new_unit_types
]
)
#: The combined type and id information for new units
self.new_unit_info = list(
zip(
self.new_unit_ids,
unit_types,
unit_subgroup_priorities,
unit_intra_subgroup_priorities,
)
)
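        # Illustrative expansion (hypothetical values): new_unit_types of
        # [(48, 1, 2, 2)] with new_unit_ids of [0x10, 0x11] stretches the single
        # (type, subgroup_priority, intra_subgroup_priority, count) entry across
        # both ids, giving new_unit_info == [(0x10, 48, 1, 2), (0x11, 48, 1, 2)].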
#: A list of references to units added by this selection
self.new_units = None
#: Deprecated, see new_units
self.objects = None
def __str__(self):
if self.new_units:
return GameEvent.__str__(self) + str([str(u) for u in self.new_units])
else:
return GameEvent.__str__(self) + str([str(u) for u in self.new_unit_info])
def create_control_group_event(frame, pid, data):
update_type = data["control_group_update"]
if update_type == 0:
return SetControlGroupEvent(frame, pid, data)
elif update_type == 1:
return AddToControlGroupEvent(frame, pid, data)
elif update_type == 2:
return GetControlGroupEvent(frame, pid, data)
elif update_type == 3:
# TODO: What could this be?!?
return ControlGroupEvent(frame, pid, data)
else:
# No idea what this is but we're seeing update_types of 4 and 5 in 3.0
return ControlGroupEvent(frame, pid, data)
@loggable
class ControlGroupEvent(GameEvent):
"""
    ControlGroup events are recorded whenever a player action modifies or accesses a control
group. There are three kinds of events, generated by each of the possible
player actions:
    * :class:`SetControlGroupEvent` - Recorded when a user sets a control group (ctrl+#).
    * :class:`GetControlGroupEvent` - Recorded when a user retrieves a control group (#).
    * :class:`AddToControlGroupEvent` - Recorded when a user adds to a control group (shift+ctrl+#).
All three events have the same set of data (shown below) but are interpreted differently.
See the class entry for details.
"""
def __init__(self, frame, pid, data):
super(ControlGroupEvent, self).__init__(frame, pid)
#: Index to the control group being modified
self.control_group = data["control_group_index"]
#: Deprecated, use control_group
self.bank = self.control_group
#: Deprecated, use control_group
self.hotkey = self.control_group
        #: The type of update being performed: 0 (set), 1 (add), 2 (get)
self.update_type = data["control_group_update"]
#: The type of mask to apply. One of None, Mask, OneIndices, ZeroIndices
self.mask_type = data["remove_mask"][0]
#: The data for the mask
self.mask_data = data["remove_mask"][1]
class SetControlGroupEvent(ControlGroupEvent):
"""
Extends :class:`ControlGroupEvent`
    This event does a straightforward replace of the current control group contents
with the player's current selection. This event doesn't have masks set.
"""
class AddToControlGroupEvent(SetControlGroupEvent):
"""
    Extends :class:`SetControlGroupEvent`
This event adds the current selection to the control group.
"""
class GetControlGroupEvent(ControlGroupEvent):
"""
Extends :class:`ControlGroupEvent`
This event replaces the current selection with the contents of the control group.
The mask data is used to limit that selection to units that are currently selectable.
You might have 1 medivac and 8 marines on the control group but if the 8 marines are
inside the medivac they cannot be part of your selection.
"""
@loggable
class CameraEvent(GameEvent):
"""
    Camera events are generated whenever the player camera moves, zooms, or rotates.
    It does not matter why the camera changed; this event simply records the current
    state of the camera after the change.
"""
def __init__(self, frame, pid, data):
super(CameraEvent, self).__init__(frame, pid)
#: The x coordinate of the center of the camera
self.x = (data["target"]["x"] if data["target"] is not None else 0) / 256.0
#: The y coordinate of the center of the camera
self.y = (data["target"]["y"] if data["target"] is not None else 0) / 256.0
#: The location of the center of the camera
self.location = (self.x, self.y)
#: The distance to the camera target ??
self.distance = data["distance"]
#: The current pitch of the camera
self.pitch = data["pitch"]
#: The current yaw of the camera
self.yaw = data["yaw"]
def __str__(self):
return self._str_prefix() + "{0} at ({1}, {2})".format(
self.name, self.x, self.y
)
@loggable
class ResourceTradeEvent(GameEvent):
"""
    Generated when a player trades resources with another player, but not when fulfilling
    resource requests.
"""
def __init__(self, frame, pid, data):
super(ResourceTradeEvent, self).__init__(frame, pid)
#: The id of the player sending the resources
self.sender_id = pid
#: A reference to the player sending the resources
self.sender = None
#: The id of the player receiving the resources
self.recipient_id = data["recipient_id"]
#: A reference to the player receiving the resources
self.recipient = None
#: An array of resources sent
self.resources = data["resources"]
#: Amount minerals sent
self.minerals = self.resources[0] if len(self.resources) >= 1 else None
#: Amount vespene sent
self.vespene = self.resources[1] if len(self.resources) >= 2 else None
#: Amount terrazine sent
self.terrazine = self.resources[2] if len(self.resources) >= 3 else None
#: Amount custom resource sent
self.custom_resource = self.resources[3] if len(self.resources) >= 4 else None
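        # Example decoding (hypothetical data): resources == [500, 250, 0, 0]
        # yields minerals=500, vespene=250, terrazine=0 and custom_resource=0;
        # shorter arrays simply leave the trailing attributes as None.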
def __str__(self):
return self._str_prefix() + " transfer {0} minerals, {1} gas, {2} terrazine, and {3} custom to {4}".format(
self.minerals,
self.vespene,
self.terrazine,
self.custom_resource,
self.recipient,
)
class ResourceRequestEvent(GameEvent):
"""
Generated when a player creates a resource request.
"""
def __init__(self, frame, pid, data):
super(ResourceRequestEvent, self).__init__(frame, pid)
        #: An array of resources requested
        self.resources = data["resources"]
        #: Amount minerals requested
        self.minerals = self.resources[0] if len(self.resources) >= 1 else None
        #: Amount vespene requested
        self.vespene = self.resources[1] if len(self.resources) >= 2 else None
        #: Amount terrazine requested
        self.terrazine = self.resources[2] if len(self.resources) >= 3 else None
        #: Amount custom resource requested
        self.custom_resource = self.resources[3] if len(self.resources) >= 4 else None
def __str__(self):
return (
self._str_prefix()
+ " requests {0} minerals, {1} gas, {2} terrazine, and {3} custom".format(
self.minerals, self.vespene, self.terrazine, self.custom_resource
)
)
class ResourceRequestFulfillEvent(GameEvent):
"""
Generated when a player accepts a resource request.
"""
def __init__(self, frame, pid, data):
super(ResourceRequestFulfillEvent, self).__init__(frame, pid)
#: The id of the request being fulfilled
self.request_id = data["request_id"]
class ResourceRequestCancelEvent(GameEvent):
"""
Generated when a player cancels their resource request.
"""
def __init__(self, frame, pid, data):
super(ResourceRequestCancelEvent, self).__init__(frame, pid)
#: The id of the request being cancelled
self.request_id = data["request_id"]
class HijackReplayGameEvent(GameEvent):
"""
Generated when players take over from a replay.
"""
def __init__(self, frame, pid, data):
super(HijackReplayGameEvent, self).__init__(frame, pid)
#: The method used. Not sure what 0/1 represent
self.method = data["method"]
#: Information on the users hijacking the game
self.user_infos = data["user_infos"]
| ggtracker/sc2reader | sc2reader/events/game.py | Python | mit | 25,673 | 0.002454 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from cms.models import Page
from cms.utils.i18n import get_language_list
from django.db import migrations, models
def forwards(apps, schema_editor):
BlogConfig = apps.get_model('djangocms_blog', 'BlogConfig')
BlogConfigTranslation = apps.get_model('djangocms_blog', 'BlogConfigTranslation')
Post = apps.get_model('djangocms_blog', 'Post')
BlogCategory = apps.get_model('djangocms_blog', 'BlogCategory')
GenericBlogPlugin = apps.get_model('djangocms_blog', 'GenericBlogPlugin')
LatestPostsPlugin = apps.get_model('djangocms_blog', 'LatestPostsPlugin')
AuthorEntriesPlugin = apps.get_model('djangocms_blog', 'AuthorEntriesPlugin')
config = None
for page in Page.objects.drafts().filter(application_urls='BlogApp'):
config, created = BlogConfig.objects.get_or_create(namespace=page.application_namespace)
if not BlogConfigTranslation.objects.exists():
for lang in get_language_list():
title = page.get_title(lang)
translation = BlogConfigTranslation.objects.create(language_code=lang, master_id=config.pk, app_title=title)
if config:
for model in (Post, BlogCategory, GenericBlogPlugin, LatestPostsPlugin, AuthorEntriesPlugin):
for item in model.objects.filter(app_config__isnull=True):
item.app_config = config
item.save()
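# Rough illustration of the migration above (hypothetical names, not from any
# real project): a draft page with application_namespace "blog_en" yields a
# BlogConfig with namespace "blog_en"; if no BlogConfigTranslation rows exist
# yet, one is created per installed language from the page title; finally any
# Post/BlogCategory/plugin rows still lacking an app_config are attached to the
# last config found.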
def backwards(apps, schema_editor):
# No need for backward data migration
pass
class Migration(migrations.Migration):
dependencies = [
('cms', '0004_auto_20140924_1038'),
('djangocms_blog', '0013_auto_20160201_2235'),
]
operations = [
migrations.RunPython(forwards, backwards),
]
| skirsdeda/djangocms-blog | djangocms_blog/migrations/0014_auto_20160215_1331.py | Python | bsd-3-clause | 1,773 | 0.003384 |
from __future__ import absolute_import
from sentry.identity.vsts import VSTSIdentityProvider
from sentry.integrations.exceptions import IntegrationError
from sentry.integrations.vsts import VstsIntegration, VstsIntegrationProvider
from sentry.models import (
Integration, IntegrationExternalProject, OrganizationIntegration, Repository,
Project
)
from sentry.plugins import plugins
from tests.sentry.plugins.testutils import VstsPlugin # NOQA
from .testutils import VstsIntegrationTestCase, CREATE_SUBSCRIPTION
class VstsIntegrationProviderTest(VstsIntegrationTestCase):
# Test data setup in ``VstsIntegrationTestCase``
def test_basic_flow(self):
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
assert integration.external_id == self.vsts_account_id
assert integration.name == self.vsts_account_name
metadata = integration.metadata
assert metadata['scopes'] == list(VSTSIdentityProvider.oauth_scopes)
assert metadata['subscription']['id'] == \
CREATE_SUBSCRIPTION['publisherInputs']['tfsSubscriptionId']
assert metadata['domain_name'] == '{}.visualstudio.com'.format(
self.vsts_account_name
)
def test_migrate_repositories(self):
accessible_repo = Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
inaccessible_repo = Repository.objects.create(
organization_id=self.organization.id,
name='NotReachable',
url='https://randoaccount.visualstudio.com/Product/_git/NotReachable',
provider='visualstudio',
external_id='123456789',
)
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
assert Repository.objects.get(
id=accessible_repo.id,
).integration_id == integration.id
assert Repository.objects.get(
id=inaccessible_repo.id,
).integration_id is None
def setupPluginTest(self):
self.project = Project.objects.create(
organization_id=self.organization.id,
)
self.plugin = plugins.get('vsts')
self.plugin.enable(self.project)
def test_disabled_plugin_when_fully_migrated(self):
self.setupPluginTest()
Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
# Enabled before Integration installation
assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]
self.assert_installation()
# Disabled
assert 'vsts' not in [p.slug for p in plugins.for_project(self.project)]
def test_doesnt_disable_plugin_when_partially_migrated(self):
self.setupPluginTest()
# Repo accessible by new Integration
Repository.objects.create(
organization_id=self.organization.id,
name=self.project_a['name'],
url='https://{}.visualstudio.com/DefaultCollection/_git/{}'.format(
self.vsts_account_name,
self.repo_name,
),
provider='visualstudio',
external_id=self.repo_id,
)
# Inaccessible Repo - causes plugin to stay enabled
Repository.objects.create(
organization_id=self.organization.id,
name='NotReachable',
url='https://randoaccount.visualstudio.com/Product/_git/NotReachable',
provider='visualstudio',
external_id='123456789',
)
self.assert_installation()
# Still enabled
assert 'vsts' in [p.slug for p in plugins.for_project(self.project)]
def test_build_integration(self):
state = {
'account': {
'AccountName': self.vsts_account_name,
'AccountId': self.vsts_account_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': {
'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
}
integration = VstsIntegrationProvider()
integration_dict = integration.build_integration(state)
assert integration_dict['name'] == self.vsts_account_name
assert integration_dict['external_id'] == self.vsts_account_id
assert integration_dict['metadata']['domain_name'] == \
'{}.visualstudio.com'.format(self.vsts_account_name)
assert integration_dict['user_identity']['type'] == 'vsts'
assert integration_dict['user_identity']['external_id'] == \
self.vsts_account_id
assert integration_dict['user_identity']['scopes'] == sorted(
VSTSIdentityProvider.oauth_scopes)
def test_webhook_subscription_created_once(self):
self.assert_installation()
state = {
'account': {
'AccountName': self.vsts_account_name,
'AccountId': self.vsts_account_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': {
'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
}
# The above already created the Webhook, so subsequent calls to
# ``build_integration`` should omit that data.
data = VstsIntegrationProvider().build_integration(state)
assert 'subscription' not in data['metadata']
def test_fix_subscription(self):
external_id = '1234567890'
Integration.objects.create(
metadata={},
provider='vsts',
external_id=external_id,
)
data = VstsIntegrationProvider().build_integration({
'account': {
'AccountName': self.vsts_account_name,
'AccountId': external_id,
},
'instance': '{}.visualstudio.com'.format(self.vsts_account_name),
'identity': {
'data': {
'access_token': self.access_token,
'expires_in': '3600',
'refresh_token': self.refresh_token,
'token_type': 'jwt-bearer',
},
},
})
assert external_id == data['external_id']
subscription = data['metadata']['subscription']
assert subscription['id'] is not None and subscription['secret'] is not None
class VstsIntegrationTest(VstsIntegrationTestCase):
def test_get_organization_config(self):
self.assert_installation()
integration = Integration.objects.get(provider='vsts')
fields = integration.get_installation(
integration.organizations.first().id
).get_organization_config()
assert [field['name'] for field in fields] == [
'sync_status_forward',
'sync_forward_assignment',
'sync_comments',
'sync_status_reverse',
'sync_reverse_assignment',
]
def test_update_organization_config_remove_all(self):
self.assert_installation()
model = Integration.objects.get(provider='vsts')
integration = VstsIntegration(model, self.organization.id)
org_integration = OrganizationIntegration.objects.get(
organization_id=self.organization.id,
)
data = {
'sync_status_forward': {},
'other_option': 'hello',
}
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=1,
resolved_status='ResolvedStatus1',
unresolved_status='UnresolvedStatus1',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=2,
resolved_status='ResolvedStatus2',
unresolved_status='UnresolvedStatus2',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=3,
resolved_status='ResolvedStatus3',
unresolved_status='UnresolvedStatus3',
)
integration.update_organization_config(data)
external_projects = IntegrationExternalProject.objects \
.all() \
.values_list('external_id', flat=True)
assert list(external_projects) == []
config = OrganizationIntegration.objects.get(
organization_id=org_integration.organization_id,
integration_id=org_integration.integration_id
).config
assert config == {
'sync_status_forward': False,
'other_option': 'hello',
}
def test_update_organization_config(self):
self.assert_installation()
org_integration = OrganizationIntegration.objects.get(
organization_id=self.organization.id,
)
model = Integration.objects.get(provider='vsts')
integration = VstsIntegration(model, self.organization.id)
# test validation
data = {
'sync_status_forward': {
1: {
'on_resolve': '',
'on_unresolve': 'UnresolvedStatus1',
},
},
}
with self.assertRaises(IntegrationError):
integration.update_organization_config(data)
data = {
'sync_status_forward': {
1: {
'on_resolve': 'ResolvedStatus1',
'on_unresolve': 'UnresolvedStatus1',
},
2: {
'on_resolve': 'ResolvedStatus2',
'on_unresolve': 'UnresolvedStatus2',
},
4: {
'on_resolve': 'ResolvedStatus4',
'on_unresolve': 'UnresolvedStatus4',
},
},
'other_option': 'hello',
}
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=1,
resolved_status='UpdateMe',
unresolved_status='UpdateMe',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=2,
resolved_status='ResolvedStatus2',
unresolved_status='UnresolvedStatus2',
)
IntegrationExternalProject.objects.create(
organization_integration_id=org_integration.id,
external_id=3,
resolved_status='ResolvedStatus3',
unresolved_status='UnresolvedStatus3',
)
integration.update_organization_config(data)
external_projects = IntegrationExternalProject.objects \
.all() \
.order_by('external_id')
assert external_projects[0].external_id == '1'
assert external_projects[0].resolved_status == 'ResolvedStatus1'
assert external_projects[0].unresolved_status == 'UnresolvedStatus1'
assert external_projects[1].external_id == '2'
assert external_projects[1].resolved_status == 'ResolvedStatus2'
assert external_projects[1].unresolved_status == 'UnresolvedStatus2'
assert external_projects[2].external_id == '4'
assert external_projects[2].resolved_status == 'ResolvedStatus4'
assert external_projects[2].unresolved_status == 'UnresolvedStatus4'
config = OrganizationIntegration.objects.get(
organization_id=org_integration.organization_id,
integration_id=org_integration.integration_id
).config
assert config == {
'sync_status_forward': True,
'other_option': 'hello',
}
| ifduyue/sentry | tests/sentry/integrations/vsts/test_integration.py | Python | bsd-3-clause | 12,757 | 0.000392 |
"""
Django settings for timeline project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nv&mfrq1ou*#1%hq7_8o)vf24$ar09m(*)oras0-mzmg!bwjlu'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'grappelli',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
'timeline',
'corsheaders',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'timeline.urls'
WSGI_APPLICATION = 'timeline.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'timeline-db',
'USER': 'postgres',
'PASSWORD': 'zawnza',
'HOST': 'localhost',
'PORT': '',
}
}
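# A minimal sketch (an addition, not part of the original project) of how the
# hard-coded credentials above could be overridden from the environment for
# deployment; the TIMELINE_DB_* variable names are assumptions, not an existing
# convention in this repo.
DATABASES['default']['PASSWORD'] = os.environ.get(
    'TIMELINE_DB_PASSWORD', DATABASES['default']['PASSWORD'])
DATABASES['default']['HOST'] = os.environ.get(
    'TIMELINE_DB_HOST', DATABASES['default']['HOST'])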
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.FileSystemFinder',
)
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
# Media files (User uploaded files)
# WARNING: This is (probably) not a sensible configuration for prod.
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Template Directories
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
)
CORS_ORIGIN_ALLOW_ALL = True
| mthpower/timeline-blog | timeline/timeline/settings.py | Python | mit | 2,922 | 0.000342 |
"""Translate cli commands to non-cli code."""
import logging
from urllib.error import HTTPError, URLError
import requests
from kytos.utils.config import KytosConfig
LOG = logging.getLogger(__name__)
class WebAPI: # pylint: disable=too-few-public-methods
"""An API for the command-line interface."""
@classmethod
def update(cls, args):
"""Call the method to update the Web UI."""
kytos_api = KytosConfig().config.get('kytos', 'api')
url = f"{kytos_api}api/kytos/core/web/update"
version = args["<version>"]
if version:
url += f"/{version}"
try:
result = requests.post(url)
        except (HTTPError, URLError, requests.exceptions.ConnectionError):
LOG.error("Can't connect to server: %s", kytos_api)
return
if result.status_code != 200:
LOG.info("Error while updating web ui: %s", result.content)
else:
LOG.info("Web UI updated.")
| kytos/kytos-utils | kytos/cli/commands/web/api.py | Python | mit | 986 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
import sys
from os_net_config import cli
from os_net_config.tests import base
import six
REALPATH = os.path.dirname(os.path.realpath(__file__))
SAMPLE_BASE = os.path.join(REALPATH, '../../', 'etc',
'os-net-config', 'samples')
class TestCli(base.TestCase):
def run_cli(self, argstr, exitcodes=(0,)):
orig = sys.stdout
orig_stderr = sys.stderr
sys.stdout = six.StringIO()
sys.stderr = six.StringIO()
ret = cli.main(argstr.split())
self.assertIn(ret, exitcodes)
stdout = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = orig
stderr = sys.stderr.getvalue()
sys.stderr.close()
sys.stderr = orig_stderr
return (stdout, stderr)
def test_bond_noop_output(self):
bond_yaml = os.path.join(SAMPLE_BASE, 'bond.yaml')
bond_json = os.path.join(SAMPLE_BASE, 'bond.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % bond_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop '
'-c %s' % bond_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-ctlplane',
'DEVICE=em2',
'DEVICE=em1',
'DEVICE=bond1',
'DEVICETYPE=ovs']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_bridge_noop_output(self):
bridge_yaml = os.path.join(SAMPLE_BASE, 'bridge_dhcp.yaml')
bridge_json = os.path.join(SAMPLE_BASE, 'bridge_dhcp.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=eni --noop -c %s' %
bridge_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=eni --noop -c %s' %
bridge_json)
self.assertEqual('', stderr)
sanity_devices = ['iface br-ctlplane inet dhcp',
'iface em1',
'ovs_type OVSBridge']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_vlan_noop_output(self):
vlan_yaml = os.path.join(SAMPLE_BASE, 'bridge_vlan.yaml')
vlan_json = os.path.join(SAMPLE_BASE, 'bridge_vlan.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% vlan_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% vlan_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=br-ctlplane',
'DEVICE=em1',
'DEVICE=vlan16',
'DEVICETYPE=ovs']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
def test_interface_noop_output(self):
interface_yaml = os.path.join(SAMPLE_BASE, 'interface.yaml')
interface_json = os.path.join(SAMPLE_BASE, 'interface.json')
stdout_yaml, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% interface_yaml)
self.assertEqual('', stderr)
stdout_json, stderr = self.run_cli('ARG0 --provider=ifcfg --noop -c %s'
% interface_json)
self.assertEqual('', stderr)
sanity_devices = ['DEVICE=em1',
'BOOTPROTO=static',
'IPADDR=192.0.2.1']
for dev in sanity_devices:
self.assertIn(dev, stdout_yaml)
self.assertEqual(stdout_yaml, stdout_json)
| fredericlepied/os-net-config | os_net_config/tests/test_cli.py | Python | apache-2.0 | 4,656 | 0 |