text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34
#! /usr/bin/env python

from openturns import *

TESTPREAMBLE()
RandomGenerator.SetSeed(0)

try:
    distribution = ClaytonCopula(1.5)
    size = 1000
    sample = distribution.getSample(size)
    factory = ClaytonCopulaFactory()
    estimatedDistribution = factory.build(sample)
    print "distribution=", repr(distribution)
    print "Estimated distribution=", repr(estimatedDistribution)
    estimatedDistribution = factory.build()
    print "Default distribution=", estimatedDistribution
    estimatedDistribution = factory.build(
        distribution.getParametersCollection())
    print "Distribution from parameters=", estimatedDistribution
    estimatedClaytonCopula = factory.buildAsClaytonCopula(sample)
    print "ClaytonCopula =", distribution
    print "Estimated claytonCopula=", estimatedClaytonCopula
    estimatedClaytonCopula = factory.buildAsClaytonCopula()
    print "Default claytonCopula=", estimatedClaytonCopula
    estimatedClaytonCopula = factory.buildAsClaytonCopula(
        distribution.getParametersCollection())
    print "ClaytonCopula from parameters=", estimatedClaytonCopula

except:
    import sys
    print "t_ClaytonCopulaFactory_std.py", sys.exc_type, sys.exc_value
| sofianehaddad/ot-svn | python/test/t_ClaytonCopulaFactory_std.py | Python | mit | 1,214 | 0.000824 |
# -*- coding: utf-8 -*-
#
# Copyright 2011, Alexandre Strzelewicz
# Licensed under the MIT Version
#
#########################################################
#
# Widget generic controller
#
#########################################################

xss = local_import('xss')

def can_modify():
    if session.can_modify == False:
        raise HTTP(404)

def get_widget():
    session.forget(response)
    widgets = db((db.widgets.desktop_link==session.desktop_id)).select()
    return dict(widgets=widgets)

def new_widget():
    can_modify()
    # Xss prevention
    for req in request.vars:
        request.vars[req] = xss.xssescape(request.vars[req])
    ids = db.widgets.insert(x=request.vars.x,
                            y=request.vars.y,
                            width=request.vars.width,
                            height=request.vars.height,
                            type=request.vars.type,
                            data1=request.vars.data1,
                            data2=request.vars.data2,
                            data3=request.vars.data3,
                            title=request.vars.title,
                            user_link=auth.user.id,
                            desktop_link=session.desktop_id)
    return response.json({'success':'true', 'id':ids})

def remove_widget():
    can_modify()
    row = db((db.widgets.user_link==auth.user.id)
             & (db.widgets.desktop_link==session.desktop_id)
             & (db.widgets.id==request.vars.id)).delete()
    return response.json({'success':'true'})

#@security.xssremove
def update_widget():
    can_modify()
    # Xss prevention
    for req in request.vars:
        request.vars[req] = xss.xssescape(request.vars[req])
    db((db.widgets.user_link==auth.user.id)
       & (db.widgets.desktop_link==session.desktop_id)
       & (db.widgets.id==request.vars.id)) \
        .update(x=request.vars.x,
                y=request.vars.y,
                width=request.vars.width,
                height=request.vars.height,
                type=request.vars.type,
                data1=request.vars.data1,
                data2=request.vars.data2,
                data3=request.vars.data3,
                title=request.vars.title)
    return response.json({'success':'true'})

#
# entr widgets to share widget (to put in desk.py)
#
# widgets = db((db.desktop.id==db.entr_desktop_widgets.desktop_link)
#              & (db.widgets_entr.id==db.entr_desktop_widgets.widget_link)
#              & (db.desktop.id==desktop.id))\
#              .select(db.widgets_entr.ALL)
# logger.debug(widgets)
#
#
#
# def new_widget_entr():
#     widget = db.widgets_entr.insert(x=request.vars.x,
#                                     y=request.vars.y,
#                                     width=request.vars.width,
#                                     height=request.vars.height,
#                                     type=request.vars.type,
#                                     data1=request.vars.data1,
#                                     data2=request.vars.data2,
#                                     data3=request.vars.data3,
#                                     title=request.vars.title)
#     db.entr_desktop_widgets.insert(desktop_link=session.desktop_id,
#                                    widget_link=widget.id)
#     return response.json({'success':'true', 'id':widget.id})
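The `xss` module imported via `local_import` is app-local and not shown. A minimal stand-in for `xssescape` built on the standard library (an assumption about its behavior, not the project's actual implementation):

```python
import html

def xssescape(text):
    """Escape HTML-special characters so user input cannot inject markup.

    Sketch only; the controller above uses the app's own xss module.
    """
    if text is None:
        return text
    return html.escape(text, quote=True)

print(xssescape('<script>alert(1)</script>'))
# &lt;script&gt;alert(1)&lt;/script&gt;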
| Unitech/Skytoop | controllers/widget.py | Python | mit | 3,369 | 0.006827 |
"""
Given an integer, write an algorithm to convert it to hexadecimal. For negative integer, two's complement method is used.
Note:
All letters in hexadecimal (a-f) must be in lowercase.
The hexadecimal string must not contain extra leading 0s.
If the number is zero, it is represented by a single zero character '0'; otherwise,
the first character in the hexadecimal string will not be the zero character.
The given number is guaranteed to fit within the range of a 32-bit signed integer.
You must not use any method provided by the library which converts/formats the number to hex directly.
Example 1:
Input:
26
Output:
"1a"
Example 2:
Input:
-1
Output:
"ffffffff"
"""
class Solution(object):
def toHex(self, num):
"""
:type num: int
:rtype: str
"""
if 0 == num:
return "0"
# both OK
mapping = dict(zip(range(0, 16), "0123456789abcdef"))
mapping = "0123456789abcdef"
if num < 0:
num += 2 ** 32
remains = []
while num:
remains.append(mapping[num % 16])
num /= 16
return "".join(remains[::-1])
| danielsunzhongyuan/my_leetcode_in_python | convert_a_number_to_hexadecimal_405.py | Python | apache-2.0 | 1,152 | 0.005208 |
import pysam

from readtagger.pysamtools_view import view

INPUT = 'tagged_dm6.bam'


def test_pysamtoolsview(datadir_copy, tmpdir):  # noqa: D103
    input_bam = str(datadir_copy[INPUT])
    output_bam = tmpdir.join('out.bam').strpath
    region = '3R:8121625-8121731'
    view(input_bam=input_bam, output_bam=output_bam, region=region)
    assert len(pysam.AlignmentFile(output_bam).header['SQ']) == 1
| bardin-lab/readtagger | tests/test_pysamtools_view.py | Python | mit | 404 | 0 |
#!/usr/bin/env python -i
# preceding line should have path for Python on your machine

# plot.py
# Purpose: plot Temp of running LIGGGHTS simulation via GnuPlot in Pizza.py
# Syntax:  plot.py in.liggghts Nfreq Nsteps compute-ID
#          in.liggghts = LIGGGHTS input script
#          Nfreq = plot data point every this many steps
#          Nsteps = run for this many steps
#          compute-ID = ID of compute that calculates temperature
#                       (or any other scalar quantity)

import sys
sys.path.append("./pizza")
from gnu import gnu

# parse command line

argv = sys.argv
if len(argv) != 5:
    print "Syntax: plot.py in.liggghts Nfreq Nsteps compute-ID"
    sys.exit()

infile = sys.argv[1]
nfreq = int(sys.argv[2])
nsteps = int(sys.argv[3])
compute = sys.argv[4]

me = 0
# uncomment if running in parallel via Pypar
#import pypar
#me = pypar.rank()
#nprocs = pypar.size()

from liggghts import liggghts
lmp = liggghts()

# run infile all at once
# assumed to have no run command in it

lmp.file(infile)
lmp.command("thermo %d" % nfreq)

# initial 0-step run to generate initial 1-point plot

lmp.command("run 0 pre yes post no")
value = lmp.extract_compute(compute,0,0)
ntimestep = 0
xaxis = [ntimestep]
yaxis = [value]

# wrapper on GnuPlot via Pizza.py gnu tool
# just proc 0 handles plotting

if me == 0:
    gn = gnu()
    gn.plot(xaxis,yaxis)
    gn.xrange(0,nsteps)
    gn.title(compute,"Timestep","Temperature")

# run nfreq steps at a time w/out pre/post, query compute, refresh plot

while ntimestep < nsteps:
    lmp.command("run %d pre no post no" % nfreq)
    ntimestep += nfreq
    value = lmp.extract_compute(compute,0,0)
    xaxis.append(ntimestep)
    yaxis.append(value)
    if me == 0: gn.plot(xaxis,yaxis)

lmp.command("run 0 pre no post yes")

# uncomment if running in parallel via Pypar
#print "Proc %d out of %d procs has" % (me,nprocs), lmp
#pypar.finalize()
| CFDEMproject/LIGGGHTS-PUBLIC | python/examples/plot.py | Python | gpl-2.0 | 1,885 | 0.015385 |
import codecs
from uuid import uuid4
from io import BytesIO

import six
from six import b

from .fields import RequestField

writer = codecs.lookup('utf-8')[3]


def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    return uuid4().hex


def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.
    """
    if isinstance(fields, dict):
        i = six.iteritems(fields)
    else:
        i = iter(fields)

    for field in i:
        if isinstance(field, RequestField):
            yield field
        else:
            yield RequestField.from_tuples(*field)


def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        return ((k, v) for k, v in six.iteritems(fields))

    return ((k, v) for k, v in fields)


def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`mimetools.choose_boundary`.
    """
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    for field in iter_field_objects(fields):
        body.write(b('--%s\r\n' % (boundary)))

        writer(body).write(field.render_headers())
        data = field.data

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility

        if isinstance(data, six.text_type):
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b'\r\n')

    body.write(b('--%s--\r\n' % (boundary)))

    content_type = str('multipart/form-data; boundary=%s' % boundary)

    return body.getvalue(), content_type
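A minimal usage sketch (field names and the fixed boundary are illustrative; a `(filename, data)` tuple value is handled by `RequestField.from_tuples` as a file upload):

```python
body, content_type = encode_multipart_formdata(
    {'name': 'value',
     'file': ('report.txt', b'file contents')},
    boundary='boundaryXYZ',  # fixed only to make the output predictable
)
print(content_type)
# multipart/form-data; boundary=boundaryXYZ
print(body.decode('utf-8'))
```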
| Mapotempo/mapotempo-qgis-plugin | urllib3/filepost.py | Python | gpl-2.0 | 2,256 | 0.000887 |
from functools import wraps
from uuid import uuid4

from app.globals import get_session_store
from app.utilities.schema import load_schema_from_session_data


def with_schema(function):
    """Adds the survey schema as the first argument to the function being wrapped.
    Use on flask request handlers or methods called by flask request handlers.
    May error unless there is a `current_user`, so should be used as follows e.g.

    ```python
    @login_required
    @with_schema
    @full_routing_path_required
    def get_block(routing_path, schema, *args):
        ...
    ```
    """
    @wraps(function)
    def wrapped_function(*args, **kwargs):
        session_data = get_session_store().session_data
        schema = load_schema_from_session_data(session_data)
        return function(schema, *args, **kwargs)

    return wrapped_function


def get_group_instance_id(schema, answer_store, location, answer_instance=0):
    """Return a group instance_id if required, or None if not"""
    if not schema.location_requires_group_instance(location):
        return None

    dependent_drivers = schema.get_group_dependencies(location.group_id)
    if dependent_drivers:
        return _get_dependent_group_instance(schema, dependent_drivers, answer_store, location.group_instance)

    existing_answers = []

    if location.group_id in schema.get_group_dependencies_group_drivers() or \
            location.block_id in schema.get_group_dependencies_group_drivers():
        group_answer_ids = schema.get_answer_ids_for_group(location.group_id)
        existing_answers = answer_store.filter(answer_ids=group_answer_ids, group_instance=location.group_instance)

    if location.block_id in schema.get_group_dependencies_block_drivers():
        block_answer_ids = schema.get_answer_ids_for_block(location.block_id)
        existing_answers = answer_store.filter(answer_ids=block_answer_ids, answer_instance=answer_instance)

    # If there are existing answers with a group_instance_id
    existing_answers_with_group_instance_id = [answer for answer in existing_answers if answer.get('group_instance_id')]
    if existing_answers_with_group_instance_id:
        return existing_answers_with_group_instance_id[0]['group_instance_id']

    return str(uuid4())


def _get_dependent_group_instance(schema, dependent_drivers, answer_store, group_instance):
    group_instance_ids = []

    for driver_id in dependent_drivers:
        if driver_id in schema.get_group_dependencies_group_drivers():
            if schema.get_group(driver_id):
                driver_answer_ids = schema.get_answer_ids_for_group(driver_id)
                group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))
            else:
                driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
                group_instance_ids.extend(_get_group_instance_ids_for_group(answer_store, driver_answer_ids))

        if driver_id in schema.get_group_dependencies_block_drivers():
            driver_answer_ids = schema.get_answer_ids_for_block(driver_id)
            group_instance_ids.extend(_get_group_instance_ids_for_block(answer_store, driver_answer_ids))

    return group_instance_ids[group_instance]


def _get_group_instance_ids_for_group(answer_store, group_answer_ids):
    group_instance_ids = []
    group_instances = 0
    for answer in list(answer_store.filter(answer_ids=group_answer_ids)):
        group_instances = max(group_instances, answer['group_instance'])

    for i in range(group_instances + 1):
        answers = list(answer_store.filter(answer_ids=group_answer_ids, group_instance=i))
        if answers:
            group_instance_ids.append(answers[0]['group_instance_id'])

    return group_instance_ids


def _get_group_instance_ids_for_block(answer_store, block_answer_ids):
    group_instance_ids = []
    answer_instances = 0
    for answer in list(answer_store.filter(answer_ids=block_answer_ids)):
        answer_instances = max(answer_instances, answer['answer_instance'])

    for i in range(answer_instances + 1):
        answers = list(answer_store.filter(answer_ids=block_answer_ids, answer_instance=i))
        if answers:
            group_instance_ids.append(answers[0]['group_instance_id'])

    return group_instance_ids
| ONSdigital/eq-survey-runner | app/helpers/schema_helpers.py | Python | mit | 4,296 | 0.002793 |
# login methods are dynamically imported if auth is enabled

import logging

from .logout import Logout

import tornado.web

import _


@_.components.Register('auth')
class Authentication(tornado.web.RequestHandler):
    @classmethod
    def _pyConfig(cls, config):
        cls.URL = config.pop('login_page', '/login')
| moertle/_.py | _/web/auth/__init__.py | Python | mit | 318 | 0.003145 |
from __future__ import print_function
import numpy as np
import theano
from theano import tensor
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from scipy.io import wavfile
import os
import sys
from kdllib import audio_file_iterator
from kdllib import numpy_one_hot, apply_quantize_preproc
from kdllib import numpy_softmax, numpy_sample_softmax
from kdllib import param, param_search, print_param_info
from kdllib import LearnedInitHidden
from kdllib import Linear
from kdllib import Embedding
from kdllib import Igor
from kdllib import load_checkpoint, theano_one_hot, concatenate
from kdllib import fetch_fruitspeech, list_iterator
from kdllib import np_zeros, GRU, GRUFork
from kdllib import make_weights, make_biases, relu, run_loop
from kdllib import as_shared, adam, gradient_clipping
from kdllib import get_values_from_function, set_shared_variables_in_function
from kdllib import soundsc, categorical_crossentropy
from kdllib import relu, softmax, sample_softmax


if __name__ == "__main__":
    import argparse

    fs = 16000
    minibatch_size = 128
    cut_len = 64
    n_epochs = 1000  # Used way at the bottom in the training loop!
    checkpoint_every_n_epochs = 1
    checkpoint_every_n_updates = 1000
    checkpoint_every_n_seconds = 60 * 60
    random_state = np.random.RandomState(1999)

    filepath = "/Tmp/kastner/blizzard_wav_files/*flac"
    train_itr = audio_file_iterator(filepath, minibatch_size=minibatch_size,
                                    stop_index=.9, preprocess="quantize")
    valid_itr = audio_file_iterator(filepath, minibatch_size=minibatch_size,
                                    start_index=.9, preprocess="quantize")

    X_mb, X_mb_mask = next(train_itr)
    train_itr.reset()

    input_dim = 256
    n_embed = 256
    n_hid = 512
    n_bins = 256

    desc = "Speech generation"
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('-s', '--sample',
                        help='Sample from a checkpoint file',
                        default=None,
                        required=False)

    def restricted_int(x):
        if x is None:
            # None makes it "auto" sample
            return x
        x = int(x)
        if x < 1:
            raise argparse.ArgumentTypeError("%r not range [1, inf]" % (x,))
        return x

    parser.add_argument('-sl', '--sample_length',
                        help='Number of steps to sample, default is automatic',
                        type=restricted_int,
                        default=None,
                        required=False)

    def restricted_float(x):
        if x is None:
            # None makes it "auto" temperature
            return x
        x = float(x)
        if x <= 0:
            raise argparse.ArgumentTypeError("%r not range (0, inf]" % (x,))
        return x

    parser.add_argument('-t', '--temperature',
                        help='Sampling temperature for softmax',
                        type=restricted_float,
                        default=None,
                        required=False)
    parser.add_argument('-c', '--continue', dest="cont",
                        help='Continue training from another saved model',
                        default=None,
                        required=False)
    args = parser.parse_args()

    if args.sample is not None:
        import matplotlib
        matplotlib.use('Agg')
        import matplotlib.pyplot as plt
        checkpoint_file = args.sample
        if not os.path.exists(checkpoint_file):
            raise ValueError("Checkpoint file path %s" % checkpoint_file,
                             " does not exist!")
        print(checkpoint_file)
        checkpoint_dict = load_checkpoint(checkpoint_file)
        X_mb, X_mb_mask = next(train_itr)
        train_itr.reset()
        prev_h1, prev_h2, prev_h3 = [np_zeros((minibatch_size, n_hid))
                                     for i in range(3)]

        sample_function = checkpoint_dict["sample_function"]

        if args.temperature is None:
            args.temperature = 1.
        if args.sample_length is None:
            raise ValueError("NYI - use -sl or --sample_length ")
        else:
            fixed_steps = args.sample_length
            temperature = args.temperature
            completed = []
            # 0 is in the middle
            # CANNOT BE 1 timestep - will get floating point exception!
            # 2 may still be buggy because X_sym gets sliced and scan gets mad with 1 timestep usually...
            init_x = 127 + np_zeros((3, minibatch_size, 1)).astype(theano.config.floatX)
            for i in range(fixed_steps):
                if i % 100 == 0:
                    print("Sampling step %i" % i)
                rvals = sample_function(init_x, prev_h1, prev_h2,
                                        prev_h3)
                sampled, h1_s, h2_s, h3_s = rvals
                pred_s = numpy_softmax(sampled, temperature=temperature)
                # debug=True gives argmax
                # use 0 since it is a moving window
                choice = numpy_sample_softmax(pred_s[0], random_state)
                choice = choice[None]
                completed.append(choice)

                # use 3 since scan is throwing exceptions
                init_x = np.concatenate((choice[..., None], choice[..., None], choice[..., None]),
                                        axis=0)
                init_x = init_x.astype(theano.config.floatX)
                # use next step
                prev_h1 = h1_s[0]
                prev_h2 = h2_s[0]
                prev_h3 = h3_s[0]
            print("Completed sampling after %i steps" % fixed_steps)
            # mb, length
            completed = np.array(completed)[:, 0, :]
            completed = completed.transpose(1, 0)
            # all samples would be range(len(completed))
            for i in range(10):
                ex = completed[i].ravel()
                s = "gen_%i.wav" % (i)
                """
                ex = ex.astype("float32")
                ex -= ex.min()
                ex /= ex.max()
                ex -= 0.5
                ex *= 0.95
                wavfile.write(s, fs, ex)
                """
                wavfile.write(s, fs, soundsc(ex))
        print("Sampling complete, exiting...")
        sys.exit()
    else:
        print("No plotting arguments, starting training mode!")

    X_sym = tensor.tensor3("X_sym")
    X_sym.tag.test_value = X_mb[:cut_len]
    X_mask_sym = tensor.matrix("X_mask_sym")
    X_mask_sym.tag.test_value = X_mb_mask[:cut_len]

    init_h1_i = tensor.matrix("init_h1")
    init_h1_i.tag.test_value = np_zeros((minibatch_size, n_hid))
    init_h2_i = tensor.matrix("init_h2")
    init_h2_i.tag.test_value = np_zeros((minibatch_size, n_hid))
    init_h3_i = tensor.matrix("init_h3")
    init_h3_i.tag.test_value = np_zeros((minibatch_size, n_hid))

    init_h1, init_h2, init_h3 = LearnedInitHidden(
        [init_h1_i, init_h2_i, init_h3_i], 3 * [(minibatch_size, n_hid)])

    inpt = X_sym[:-1]
    target = X_sym[1:]
    mask = X_mask_sym[:-1]

    embed_dim = 256
    embed1 = Embedding(inpt, 256, embed_dim, random_state)
    in_h1, ingate_h1 = GRUFork([embed1], [embed_dim], n_hid, random_state)
    in_h2, ingate_h2 = GRUFork([embed1], [embed_dim], n_hid, random_state)
    in_h3, ingate_h3 = GRUFork([embed1], [embed_dim], n_hid, random_state)

    def step(in_h1_t, ingate_h1_t,
             in_h2_t, ingate_h2_t,
             in_h3_t, ingate_h3_t,
             h1_tm1, h2_tm1, h3_tm1):
        h1_t = GRU(in_h1_t, ingate_h1_t, h1_tm1, n_hid, n_hid, random_state)
        h1_h2_t, h1gate_h2_t = GRUFork([h1_t], [n_hid], n_hid, random_state)
        h1_h3_t, h1gate_h3_t = GRUFork([h1_t], [n_hid], n_hid, random_state)

        h2_t = GRU(h1_h2_t + in_h2_t, h1gate_h2_t + ingate_h2_t, h2_tm1,
                   n_hid, n_hid, random_state)
        h2_h3_t, h2gate_h3_t = GRUFork([h2_t], [n_hid], n_hid, random_state)

        h3_t = GRU(h2_h3_t + in_h3_t + h1_h3_t,
                   h2gate_h3_t + ingate_h3_t + h1gate_h3_t, h3_tm1,
                   n_hid, n_hid, random_state)
        return h1_t, h2_t, h3_t

    (h1, h2, h3), updates = theano.scan(
        fn=step,
        sequences=[in_h1, ingate_h1,
                   in_h2, ingate_h2,
                   in_h3, ingate_h3],
        outputs_info=[init_h1, init_h2, init_h3])

    out = Linear([embed1, h1, h2, h3], [embed_dim, n_hid, n_hid, n_hid],
                 n_bins, random_state)
    pred = softmax(out)

    shp = target.shape
    target = target.reshape((shp[0], shp[1]))
    target = theano_one_hot(target, n_classes=n_bins)
    # dimshuffle so batch is on last axis
    cost = categorical_crossentropy(pred, target)
    cost = cost * mask.dimshuffle(0, 1)
    # sum over sequence length and features, mean over minibatch
    cost = cost.dimshuffle(1, 0)
    cost = cost.mean()
    # convert to bits vs nats
    cost = cost * tensor.cast(1.44269504089, theano.config.floatX)

    params = param_search(cost, lambda x: hasattr(x, "param"))
    print_param_info(params)

    grads = tensor.grad(cost, params)
    grads = [tensor.clip(g, -1., 1.) for g in grads]

    learning_rate = 1E-3

    opt = adam(params, learning_rate)
    updates = opt.updates(params, grads)

    if args.cont is not None:
        print("Continuing training from saved model")
        continue_path = args.cont
        if not os.path.exists(continue_path):
            raise ValueError("Continue model %s, path not "
                             "found" % continue_path)
        saved_checkpoint = load_checkpoint(continue_path)
        checkpoint_dict = saved_checkpoint
        train_function = checkpoint_dict["train_function"]
        cost_function = checkpoint_dict["cost_function"]
        predict_function = checkpoint_dict["predict_function"]
        sample_function = checkpoint_dict["sample_function"]
        """
        trained_weights = get_values_from_function(
            saved_checkpoint["train_function"])
        set_shared_variables_in_function(train_function, trained_weights)
        """
    else:
        train_function = theano.function([X_sym, X_mask_sym,
                                          init_h1_i, init_h2_i, init_h3_i],
                                         [cost, h1, h2, h3],
                                         updates=updates,
                                         on_unused_input="warn")
        cost_function = theano.function([X_sym, X_mask_sym,
                                         init_h1_i, init_h2_i, init_h3_i],
                                        [cost, h1, h2, h3],
                                        on_unused_input="warn")
        predict_function = theano.function([inpt,
                                            init_h1_i, init_h2_i, init_h3_i],
                                           [out, h1, h2, h3],
                                           on_unused_input="warn")
        sample_function = theano.function([inpt,
                                           init_h1_i, init_h2_i, init_h3_i],
                                          [out, h1, h2, h3],
                                          on_unused_input="warn")
        checkpoint_dict = {}
        checkpoint_dict["train_function"] = train_function
        checkpoint_dict["cost_function"] = cost_function
        checkpoint_dict["predict_function"] = predict_function
        checkpoint_dict["sample_function"] = sample_function

    def _loop(function, itr):
        prev_h1, prev_h2, prev_h3 = [np_zeros((minibatch_size, n_hid))
                                     for i in range(3)]
        X_mb, X_mb_mask = next(itr)
        # Sanity check there are no bugs in the mask
        assert X_mb_mask.min() > 1E-6
        n_cuts = len(X_mb) // cut_len + 1
        partial_costs = []
        for n in range(n_cuts):
            if n % 100 == 0:
                print("step %i" % n, end="")
            else:
                print(".", end="")
            start = n * cut_len
            stop = (n + 1) * cut_len
            if len(X_mb[start:stop]) < cut_len:
                # skip end edge case
                break
            rval = function(X_mb[start:stop],
                            X_mb_mask[start:stop],
                            prev_h1, prev_h2, prev_h3)
            current_cost = rval[0]
            prev_h1, prev_h2, prev_h3 = rval[1:4]
            prev_h1 = prev_h1[-1]
            prev_h2 = prev_h2[-1]
            prev_h3 = prev_h3[-1]
            partial_costs.append(current_cost)
        print("")
        return partial_costs

    i = Igor(_loop, train_function, train_itr, cost_function, valid_itr,
             n_epochs=n_epochs, checkpoint_dict=checkpoint_dict,
             checkpoint_every_n_updates=checkpoint_every_n_updates,
             checkpoint_every_n_seconds=checkpoint_every_n_seconds,
             checkpoint_every_n_epochs=checkpoint_every_n_epochs,
             skip_minimums=True)
    #i.refresh(_loop, train_function, train_itr, cost_function, valid_itr,
    #          n_epochs, checkpoint_dict)
    i.run()
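The kdllib helpers `numpy_softmax` and `numpy_sample_softmax` used in the sampling loop are not shown here. A minimal plain-numpy sketch of temperature-scaled softmax sampling (an assumption about their behavior, not the library's exact code):

```python
import numpy as np

def numpy_softmax_sketch(logits, temperature=1.0):
    # Scale logits by 1/temperature, then normalize with the usual
    # max-subtraction trick for numerical stability.
    z = logits / temperature
    z = z - z.max(axis=-1, keepdims=True)
    e = np.exp(z)
    return e / e.sum(axis=-1, keepdims=True)

def sample_softmax_sketch(probs, random_state):
    # Draw one class index per row from each categorical distribution.
    out = []
    for p in probs:
        p = p / p.sum()  # guard against float round-off
        out.append(random_state.choice(len(p), p=p))
    return np.array(out)

rng = np.random.RandomState(0)
logits = rng.randn(4, 256)
probs = numpy_softmax_sketch(logits, temperature=0.7)
print(sample_softmax_sketch(probs, rng))
```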
| kastnerkyle/crikey | ishaan_model/ishaan_baseline.py | Python | bsd-3-clause | 13,070 | 0.000995 |
from Components.ActionMap import ActionMap
from Components.Sensors import sensors
from Components.Sources.Sensor import SensorSource
from Components.Sources.StaticText import StaticText
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry
from Screens.Screen import Screen
from Plugins.Plugin import PluginDescriptor
from Components.FanControl import fancontrol


class TempFanControl(Screen, ConfigListScreen):
    skin = """
        <screen position="center,center" size="570,420" title="Fan Control" >
            <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" alphatest="on" />
            <widget source="red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <widget source="green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
            <widget source="yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
            <widget source="blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#18188b" transparent="1" />
            <widget name="config" position="10,50" size="550,120" scrollbarMode="showOnDemand" />
            <widget source="SensorTempText0" render="Label" position="10,150" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp0" render="Label" position="100,150" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText1" render="Label" position="10,170" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp1" render="Label" position="100,170" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText2" render="Label" position="10,190" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp2" render="Label" position="100,190" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText3" render="Label" position="10,210" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp3" render="Label" position="100,210" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText4" render="Label" position="10,230" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp4" render="Label" position="100,230" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText5" render="Label" position="10,250" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp5" render="Label" position="100,250" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText6" render="Label" position="10,270" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp6" render="Label" position="100,270" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorTempText7" render="Label" position="10,290" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorTemp7" render="Label" position="100,290" zPosition="1" size="100,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText0" render="Label" position="290,150" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan0" render="Label" position="380,150" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText1" render="Label" position="290,170" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan1" render="Label" position="380,170" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText2" render="Label" position="290,190" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan2" render="Label" position="380,190" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText3" render="Label" position="290,210" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan3" render="Label" position="380,210" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText4" render="Label" position="290,230" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan4" render="Label" position="380,230" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText5" render="Label" position="290,250" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan5" render="Label" position="380,250" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText6" render="Label" position="290,270" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan6" render="Label" position="380,270" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
            <widget source="SensorFanText7" render="Label" position="290,290" zPosition="1" size="90,40" font="Regular;20" halign="left" valign="top" backgroundColor="#9f1313" transparent="1" />
            <widget source="SensorFan7" render="Label" position="380,290" zPosition="1" size="150,20" font="Regular;19" halign="right">
                <convert type="SensorToText"></convert>
            </widget>
        </screen>"""

    def __init__(self, session, args = None):
        Screen.__init__(self, session)

        templist = sensors.getSensorsList(sensors.TYPE_TEMPERATURE)
        tempcount = len(templist)
        fanlist = sensors.getSensorsList(sensors.TYPE_FAN_RPM)
        fancount = len(fanlist)

        self["red"] = StaticText(_("Cancel"))
        self["green"] = StaticText(_("OK"))
        self["yellow"] = StaticText("")
        self["blue"] = StaticText("")

        for count in range(8):
            if count < tempcount:
                id = templist[count]
                self["SensorTempText%d" % count] = StaticText(sensors.getSensorName(id))
                self["SensorTemp%d" % count] = SensorSource(sensorid = id)
            else:
                self["SensorTempText%d" % count] = StaticText("")
                self["SensorTemp%d" % count] = SensorSource()
            if count < fancount:
                id = fanlist[count]
                self["SensorFanText%d" % count] = StaticText(sensors.getSensorName(id))
                self["SensorFan%d" % count] = SensorSource(sensorid = id)
            else:
                self["SensorFanText%d" % count] = StaticText("")
                self["SensorFan%d" % count] = SensorSource()

        self.list = []
        for count in range(fancontrol.getFanCount()):
            self.list.append(getConfigListEntry(_("Fan %d Voltage") % (count + 1), fancontrol.getConfig(count).vlt))
            self.list.append(getConfigListEntry(_("Fan %d PWM") % (count + 1), fancontrol.getConfig(count).pwm))
            self.list.append(getConfigListEntry(_("Standby Fan %d Voltage") % (count + 1), fancontrol.getConfig(count).vlt_standby))
            self.list.append(getConfigListEntry(_("Standby Fan %d PWM") % (count + 1), fancontrol.getConfig(count).pwm_standby))

        ConfigListScreen.__init__(self, self.list, session = self.session)
        #self["config"].list = self.list
        #self["config"].setList(self.list)
        self["config"].l.setSeperation(300)

        self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "MenuActions"],
                                    {
                                        "ok": self.save,
                                        "cancel": self.revert,
                                        "red": self.revert,
                                        "green": self.save,
                                        "menu": self.closeRecursive,
                                    }, -1)

    def save(self):
        for count in range(fancontrol.getFanCount()):
            fancontrol.getConfig(count).vlt.save()
            fancontrol.getConfig(count).pwm.save()
            fancontrol.getConfig(count).vlt_standby.save()
            fancontrol.getConfig(count).pwm_standby.save()
        self.close()

    def revert(self):
        for count in range(fancontrol.getFanCount()):
            fancontrol.getConfig(count).vlt.load()
            fancontrol.getConfig(count).pwm.load()
            fancontrol.getConfig(count).vlt_standby.load()
            fancontrol.getConfig(count).pwm_standby.load()
        self.close()


def main(session, **kwargs):
    session.open(TempFanControl)


def startMenu(menuid):
    if menuid != "system":
        return []
    return [(_("Temperature and Fan control"), main, "tempfancontrol", 80)]


def Plugins(**kwargs):
    return PluginDescriptor(name = "Temperature and Fan control", description = _("Temperature and Fan control"), where = PluginDescriptor.WHERE_MENU, needsRestart = False, fnc = startMenu)
| bally12345/enigma2 | lib/python/Plugins/SystemPlugins/TempFanControl/plugin.py | Python | gpl-2.0 | 10,581 | 0.019941 |
# -*- encoding: UTF-8 -*-

import sys
import motion
import almath
from naoqi import ALProxy


def StiffnessOn(proxy):
    # We use the "Body" name to signify the collection of all joints
    pNames = "Body"
    pStiffnessLists = 1.0
    pTimeLists = 1.0
    proxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)


def main(robotIP):
    '''
    Example showing a Hula Hoop Motion
    with the NAO cartesian control of torso
    '''
    # Init proxies.
    try:
        motionProxy = ALProxy("ALMotion", robotIP, 9559)
    except Exception, e:
        print "Could not create proxy to ALMotion"
        print "Error was: ", e

    try:
        postureProxy = ALProxy("ALRobotPosture", robotIP, 9559)
    except Exception, e:
        print "Could not create proxy to ALRobotPosture"
        print "Error was: ", e

    # Set NAO in Stiffness On
    StiffnessOn(motionProxy)

    # Send NAO to Pose Init
    postureProxy.goToPosture("StandInit", 0.5)

    # Define the changes relative to the current position
    dx = 0.07   # translation axis X (meter)
    dy = 0.07   # translation axis Y (meter)
    dwx = 0.15  # rotation axis X (rad)
    dwy = 0.15  # rotation axis Y (rad)

    # Define a path of two hula hoop loops
    path = [ [+dx, 0.0, 0.0, 0.0, -dwy, 0.0],  # point 01 : forward  / bend backward
             [0.0, -dy, 0.0, -dwx, 0.0, 0.0],  # point 02 : right    / bend left
             [-dx, 0.0, 0.0, 0.0, dwy, 0.0],   # point 03 : backward / bend forward
             [0.0, +dy, 0.0, dwx, 0.0, 0.0],   # point 04 : left     / bend right
             [+dx, 0.0, 0.0, 0.0, -dwy, 0.0],  # point 01 : forward  / bend backward
             [0.0, -dy, 0.0, -dwx, 0.0, 0.0],  # point 02 : right    / bend left
             [-dx, 0.0, 0.0, 0.0, dwy, 0.0],   # point 03 : backward / bend forward
             [0.0, +dy, 0.0, dwx, 0.0, 0.0],   # point 04 : left     / bend right
             [+dx, 0.0, 0.0, 0.0, -dwy, 0.0],  # point 05 : forward  / bend backward
             [0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ]  # point 06 : Back to init pose

    timeOneMove = 0.4  # seconds
    times = []
    for i in range(len(path)):
        times.append((i + 1) * timeOneMove)

    # call the cartesian control API
    effector = "Torso"
    space = motion.FRAME_ROBOT
    axisMask = almath.AXIS_MASK_ALL
    isAbsolute = False
    motionProxy.positionInterpolation(effector, space, path,
                                      axisMask, times, isAbsolute)


if __name__ == "__main__":
    robotIp = "127.0.0.1"

    if len(sys.argv) <= 1:
        print "Usage python motion_hulaHoop.py robotIP (optional default: 127.0.0.1)"
    else:
        robotIp = sys.argv[1]

    main(robotIp)
| KellyChan/python-examples | python/aldebaran/hana/hana/motion/cartesian/motion_hulaHoop.py | Python | mit | 2,824 | 0.009561 |
'''
Input : n=1
Output: {}

Input : n=2
Output:
{}{}
{{}}

https://www.geeksforgeeks.org/print-all-combinations-of-balanced-parentheses/
'''

def printParenthesis(string, openP, closeP):
    if(openP==0 and closeP==0):
        # all opening and closing are done
        print string
    else:
        if(openP>closeP):
            return
        if(closeP>0):
            printParenthesis(string+'}',openP,closeP-1)
        if(openP>0):
            printParenthesis(string+'{',openP-1,closeP)

n = 3
printParenthesis("", n,n)
| saurabhkumar1989/programming_question_python | my_question/all-valid-bracket-permutation.py | Python | apache-2.0 | 545 | 0.029358 |
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from distutils import core
from distutils.command.install import install
import sys, os, subprocess

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

def git(*args):
    return subprocess.check_call(['git'] + list(args))

class low_speed_spidev(install):
    def run(self):
        spidev_directory = subprocess.Popen(["pwd"], stdout=subprocess.PIPE)
        spidev_directory, err = spidev_directory.communicate()
        spidev_directory = spidev_directory.rstrip() + "/low_speed_spidev"
        os.chdir("/usr/src/kernel/")
        subprocess.call(["make", "scripts"])
        subprocess.call("make")
        os.chdir(spidev_directory)
        subprocess.call(["insmod", "low-speed-spidev.ko"])
        os.chdir("..")

class install_all(install):
    def run(self):
        current_directory = subprocess.Popen(["pwd"], stdout=subprocess.PIPE)
        current_directory, err = current_directory.communicate()
        subprocess.call(["sh", "depends.sh"])
        subprocess.call(["pip install -r requirements.txt --no-clean"], shell=True)
        install.run(self)

setup(name = 'Maker project package',
      version = '0.4',
      author = 'Adafruit Industries, Intel Corporation',
      author_email = '[email protected], [email protected]',
      description = 'Library to provide a cross-platform GPIO interface on the Raspberry Pi and Beaglebone Black using the RPi.GPIO and Adafruit_BBIO libraries. Python code to run the hardware needed for the Minnowboard maker projects found at wiki.minnowboard.org',
      license = 'MIT',
      packages = ['pyDrivers', 'Adafruit_Python_GPIO/Adafruit_GPIO'],
      long_description = read('README.md'),
      cmdclass = {'low_speed_spidev': low_speed_spidev, 'install_all': install_all},
      install_requires = ['PIL', 'numpy'],
      )
| MinnowBoard/minnow-maker | setup.py | Python | mit | 1,963 | 0.028018 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

# AZURE CLI EventHub - NAMESPACE TEST DEFINITIONS

import time

from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, KeyVaultPreparer)


# pylint: disable=line-too-long
# pylint: disable=too-many-lines


class EHNamespaceBYOKCURDScenarioTest(ScenarioTest):
    from azure.cli.testsdk.scenario_tests import AllowLargeResponse

    @AllowLargeResponse()
    @ResourceGroupPreparer(name_prefix='cli_test_eh_namespace')
    @KeyVaultPreparer(name_prefix='cli', name_len=15, additional_params='--enable-soft-delete --enable-purge-protection')
    def test_eh_namespace_premium(self, resource_group):
        self.kwargs.update({
            'loc': 'eastus',
            'rg': resource_group,
            'namespacename': self.create_random_name(prefix='eventhubs-nscli', length=20),
            'namespacename1': self.create_random_name(prefix='eventhubs-nscli', length=20),
            'namespacename2': self.create_random_name(prefix='eventhubs-nscli', length=20),
            'namespacenamekafka': self.create_random_name(prefix='eventhubs-nscli1', length=20),
            'tags': {'tag1=value1'},
            'tags2': {'tag2=value2'},
            'sku': 'Standard',
            'skupremium': 'Premium',
            'authoname': self.create_random_name(prefix='cliAutho', length=20),
            'defaultauthorizationrule': 'RootManageSharedAccessKey',
            'accessrights': 'Send',
            'accessrights1': 'Listen',
            'primary': 'PrimaryKey',
            'secondary': 'SecondaryKey',
            'istrue': 'True',
            'isfalse': 'False',
            'enableidentity': 'True',
            'maximumthroughputunits': 40,
            'maximumthroughputunits_update': 5
        })

        kv_name = self.kwargs['kv']
        key_name = self.create_random_name(prefix='cli', length=15)
        key_uri = "https://{}.vault.azure.net/".format(kv_name)
        self.kwargs.update({
            'kv_name': kv_name,
            'key_name': key_name,
            'key_uri': key_uri
        })

        # Check for the NameSpace name Availability
        self.cmd('eventhubs namespace exists --name {namespacename}', checks=[self.check('nameAvailable', True)])

        # Create Namespace
        self.cmd('eventhubs namespace create --resource-group {rg} --name {namespacename} --location {loc} --tags {tags}'
                 ' --sku {sku} --maximum-throughput-units {maximumthroughputunits} --disable-local-auth {istrue} --enable-auto-inflate {istrue}',
                 checks=[self.check('maximumThroughputUnits', '{maximumthroughputunits}'),
                         self.check('disableLocalAuth', '{istrue}')])

        self.kwargs.update({
            'maximumthroughputunits': 35})

        # Update Namespace
        self.cmd('eventhubs namespace update --resource-group {rg} --name {namespacename} '
                 '--tags {tags2} --maximum-throughput-units {maximumthroughputunits}',
                 checks=[self.check('maximumThroughputUnits', '{maximumthroughputunits}')])

        self.kwargs.update({
            'maximumthroughputunits': 16})

        # Create Namespace - premium
        self.cmd(
            'eventhubs namespace create --resource-group {rg} --name {namespacename1} --location {loc} --tags {tags}'
            ' --sku {skupremium} --disable-local-auth {isfalse}',
            checks=[self.check('disableLocalAuth', '{isfalse}'),
                    self.check('sku.name', '{skupremium}')])

        # Update Namespace
        self.cmd('eventhubs namespace update --resource-group {rg} --name {namespacename1} --disable-local-auth {istrue} '
                 '--tags {tags2}')

        # Delete Namespace list by ResourceGroup
        self.cmd('eventhubs namespace delete --resource-group {rg} --name {namespacename}')
        self.cmd('eventhubs namespace delete --resource-group {rg} --name {namespacename1}')
| yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/eventhubs/tests/latest/test_eventhub_commands_namespace_premium_test.py | Python | mit | 4,228 | 0.004257 |
# Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import functools

from oslo_config import cfg
from oslo_log import log as logging
from sqlalchemy.orm import exc

from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.db import common_db_mixin
from neutron.db import models_v2

LOG = logging.getLogger(__name__)


class DbBasePluginCommon(common_db_mixin.CommonDbMixin):
    """Stores getters and helper methods for db_base_plugin_v2

    All private getters and simple helpers like _make_*_dict were moved from
    db_base_plugin_v2.
    More complicated logic and public methods left in db_base_plugin_v2.
    Main purpose of this class is to make getters accessible for Ipam
    backends.
    """

    @staticmethod
    def _generate_mac():
        return utils.get_random_mac(cfg.CONF.base_mac.split(':'))

    @staticmethod
    def _delete_ip_allocation(context, network_id, subnet_id, ip_address):
        # Delete the IP address from the IPAllocate table
        LOG.debug("Delete allocated IP %(ip_address)s "
                  "(%(network_id)s/%(subnet_id)s)",
                  {'ip_address': ip_address,
                   'network_id': network_id,
                   'subnet_id': subnet_id})
        context.session.query(models_v2.IPAllocation).filter_by(
            network_id=network_id,
            ip_address=ip_address,
            subnet_id=subnet_id).delete()

    @staticmethod
    def _store_ip_allocation(context, ip_address, network_id, subnet_id,
                             port_id):
        LOG.debug("Allocated IP %(ip_address)s "
                  "(%(network_id)s/%(subnet_id)s/%(port_id)s)",
                  {'ip_address': ip_address,
                   'network_id': network_id,
                   'subnet_id': subnet_id,
                   'port_id': port_id})
        allocated = models_v2.IPAllocation(
            network_id=network_id,
            port_id=port_id,
            ip_address=ip_address,
            subnet_id=subnet_id
        )
        context.session.add(allocated)

    def _make_subnet_dict(self, subnet, fields=None, context=None):
        res = {'id': subnet['id'],
               'name': subnet['name'],
               'tenant_id': subnet['tenant_id'],
               'network_id': subnet['network_id'],
               'ip_version': subnet['ip_version'],
               'cidr': subnet['cidr'],
               'subnetpool_id': subnet.get('subnetpool_id'),
               'allocation_pools': [{'start': pool['first_ip'],
                                     'end': pool['last_ip']}
                                    for pool in subnet['allocation_pools']],
               'gateway_ip': subnet['gateway_ip'],
               'enable_dhcp': subnet['enable_dhcp'],
               'ipv6_ra_mode': subnet['ipv6_ra_mode'],
               'ipv6_address_mode': subnet['ipv6_address_mode'],
               'dns_nameservers': [dns['address']
                                   for dns in subnet['dns_nameservers']],
               'host_routes': [{'destination': route['destination'],
                                'nexthop': route['nexthop']}
                               for route in subnet['routes']],
               }
        # The shared attribute for a subnet is the same as its parent network
        res['shared'] = self._make_network_dict(subnet.networks,
                                                context=context)['shared']
        # Call auxiliary extend functions, if any
        self._apply_dict_extend_functions(attributes.SUBNETS, res, subnet)
        return self._fields(res, fields)

    def _make_subnetpool_dict(self, subnetpool, fields=None):
        default_prefixlen = str(subnetpool['default_prefixlen'])
        min_prefixlen = str(subnetpool['min_prefixlen'])
        max_prefixlen = str(subnetpool['max_prefixlen'])
        res = {'id': subnetpool['id'],
               'name': subnetpool['name'],
               'tenant_id': subnetpool['tenant_id'],
               'default_prefixlen': default_prefixlen,
               'min_prefixlen': min_prefixlen,
               'max_prefixlen': max_prefixlen,
               'shared': subnetpool['shared'],
               'prefixes': [prefix['cidr']
                            for prefix in subnetpool['prefixes']],
               'ip_version': subnetpool['ip_version'],
               'default_quota': subnetpool['default_quota']}
        return self._fields(res, fields)

    def _make_port_dict(self, port, fields=None,
                        process_extensions=True):
        res = {"id": port["id"],
               'name': port['name'],
               "network_id": port["network_id"],
               'tenant_id': port['tenant_id'],
               "mac_address": port["mac_address"],
               "admin_state_up": port["admin_state_up"],
               "status": port["status"],
               "fixed_ips": [{'subnet_id': ip["subnet_id"],
                              'ip_address': ip["ip_address"]}
                             for ip in port["fixed_ips"]],
               "device_id": port["device_id"],
               "device_owner": port["device_owner"]}
        # Call auxiliary extend functions, if any
        if process_extensions:
            self._apply_dict_extend_functions(
                attributes.PORTS, res, port)
        return self._fields(res, fields)

    def _get_ipam_subnetpool_driver(self, context, subnetpool=None):
        if cfg.CONF.ipam_driver:
            return ipam_base.Pool.get_instance(subnetpool, context)
        else:
            return subnet_alloc.SubnetAllocator(subnetpool, context)

    def _get_network(self, context, id):
        try:
            network = self._get_by_id(context, models_v2.Network, id)
        except exc.NoResultFound:
            raise n_exc.NetworkNotFound(net_id=id)
        return network

    def _get_subnet(self, context, id):
        try:
            subnet = self._get_by_id(context, models_v2.Subnet, id)
        except exc.NoResultFound:
            raise n_exc.SubnetNotFound(subnet_id=id)
        return subnet

    def _get_subnetpool(self, context, id):
        try:
            return self._get_by_id(context, models_v2.SubnetPool, id)
        except exc.NoResultFound:
            raise n_exc.SubnetPoolNotFound(subnetpool_id=id)

    def _get_all_subnetpools(self, context):
        # NOTE(tidwellr): see note in _get_all_subnets()
        return context.session.query(models_v2.SubnetPool).all()

    def _get_port(self, context, id):
        try:
            port = self._get_by_id(context, models_v2.Port, id)
        except exc.NoResultFound:
            raise n_exc.PortNotFound(port_id=id)
        return port

    def _get_dns_by_subnet(self, context, subnet_id):
        dns_qry = context.session.query(models_v2.DNSNameServer)
        return dns_qry.filter_by(subnet_id=subnet_id).all()

    def _get_route_by_subnet(self, context, subnet_id):
        route_qry = context.session.query(models_v2.SubnetRoute)
        return route_qry.filter_by(subnet_id=subnet_id).all()

    def _get_router_gw_ports_by_network(self, context, network_id):
        port_qry = context.session.query(models_v2.Port)
        return port_qry.filter_by(network_id=network_id,
                                  device_owner=constants.DEVICE_OWNER_ROUTER_GW).all()

    def _get_subnets_by_network(self, context, network_id):
        subnet_qry = context.session.query(models_v2.Subnet)
        return subnet_qry.filter_by(network_id=network_id).all()

    def _get_subnets_by_subnetpool(self, context, subnetpool_id):
        subnet_qry = context.session.query(models_v2.Subnet)
        return subnet_qry.filter_by(subnetpool_id=subnetpool_id).all()

    def _get_all_subnets(self, context):
        # NOTE(salvatore-orlando): This query might end up putting
        # a lot of stress on the db. Consider adding a cache layer
        return context.session.query(models_v2.Subnet).all()

    def _get_subnets(self, context, filters=None, fields=None,
                     sorts=None, limit=None, marker=None,
                     page_reverse=False):
        marker_obj = self._get_marker_obj(context, 'subnet', limit, marker)
        make_subnet_dict = functools.partial(self._make_subnet_dict,
                                             context=context)
        return self._get_collection(context, models_v2.Subnet,
                                    make_subnet_dict,
                                    filters=filters, fields=fields,
                                    sorts=sorts,
                                    limit=limit,
                                    marker_obj=marker_obj,
                                    page_reverse=page_reverse)

    def _make_network_dict(self, network, fields=None,
                           process_extensions=True, context=None):
        res = {'id': network['id'],
               'name': network['name'],
               'tenant_id': network['tenant_id'],
               'admin_state_up': network['admin_state_up'],
               'mtu': network.get('mtu', constants.DEFAULT_NETWORK_MTU),
               'status': network['status'],
               'subnets': [subnet['id']
                           for subnet in network['subnets']]}
        # The shared attribute for a network now reflects if the network
        # is shared to the calling tenant via an RBAC entry.
        shared = False
        matches = ('*',) + ((context.tenant_id,) if context else ())
        for entry in network.rbac_entries:
            if (entry.action == 'access_as_shared' and
                    entry.target_tenant in matches):
                shared = True
                break
        res['shared'] = shared
        # TODO(pritesh): Move vlan_transparent to the extension module.
        # vlan_transparent here is only added if the vlantransparent
        # extension is enabled.
        if ('vlan_transparent' in network and network['vlan_transparent'] !=
                attributes.ATTR_NOT_SPECIFIED):
            res['vlan_transparent'] = network['vlan_transparent']
        # Call auxiliary extend functions, if any
        if process_extensions:
            self._apply_dict_extend_functions(
                attributes.NETWORKS, res, network)
        return self._fields(res, fields)

    def _make_subnet_args(self, detail, subnet, subnetpool_id):
        gateway_ip = str(detail.gateway_ip) if detail.gateway_ip else None
        args = {'tenant_id': detail.tenant_id,
                'id': subnet['id'],
                'name': subnet['name'],
                'network_id': subnet['network_id'],
                'ip_version': subnet['ip_version'],
                'cidr': str(detail.subnet_cidr),
                'subnetpool_id': subnetpool_id,
                'enable_dhcp': subnet['enable_dhcp'],
                'gateway_ip': gateway_ip}
        if subnet['ip_version'] == 6 and subnet['enable_dhcp']:
            if attributes.is_attr_set(subnet['ipv6_ra_mode']):
                args['ipv6_ra_mode'] = subnet['ipv6_ra_mode']
            if attributes.is_attr_set(subnet['ipv6_address_mode']):
                args['ipv6_address_mode'] = subnet['ipv6_address_mode']
        return args

    def _make_fixed_ip_dict(self, ips):
        # Excludes from dict all keys except subnet_id and ip_address
        return [{'subnet_id': ip["subnet_id"],
                 'ip_address': ip["ip_address"]}
                for ip in ips]
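The shared-flag computation in `_make_network_dict` reduces to a small predicate. Extracted as a standalone sketch for clarity (the function name and dict-shaped entries are illustrative; the real code iterates ORM objects):

```python
def is_shared_with(rbac_entries, tenant_id=None):
    """Return True if any RBAC entry shares the network with this tenant.

    Mirrors the loop in _make_network_dict: a wildcard target ('*') or an
    exact tenant match on an 'access_as_shared' entry marks the network
    as shared for the caller.
    """
    matches = ('*',) + ((tenant_id,) if tenant_id else ())
    return any(
        entry['action'] == 'access_as_shared' and entry['target_tenant'] in matches
        for entry in rbac_entries
    )

entries = [{'action': 'access_as_shared', 'target_tenant': 'tenant-a'}]
print(is_shared_with(entries, 'tenant-a'))  # True
print(is_shared_with(entries, 'tenant-b'))  # False
```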
| paninetworks/neutron | neutron/db/db_base_plugin_common.py | Python | apache-2.0 | 12,074 | 0.000166 |
import keras
from keras.optimizers import SGD, adadelta, rmsprop, adam
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from keras.metrics import matthews_correlation, precision, recall
import keras.backend as K

import cPickle
import numpy as np

import getpass
username = getpass.getuser()

from little_foo3 import foo


def sens(y_true, y_pred):
    y_pred_pos = K.round(K.clip(y_pred, 0, 1))
    y_pred_neg = 1 - y_pred_pos
    y_pos = K.round(K.clip(y_true, 0, 1))
    y_neg = 1 - y_pos

    tp = K.sum(y_pos * y_pred_pos)
    tn = K.sum(y_neg * y_pred_neg)
    fp = K.sum(y_neg * y_pred_pos)
    fn = K.sum(y_pos * y_pred_neg)

    se = tp / (tp + fn)
    return se


def spec(y_true, y_pred):
    y_pred_pos = K.round(K.clip(y_pred, 0, 1))
    y_pred_neg = 1 - y_pred_pos
    y_pos = K.round(K.clip(y_true, 0, 1))
    y_neg = 1 - y_pos

    tp = K.sum(y_pos * y_pred_pos)
    tn = K.sum(y_neg * y_pred_neg)
    fp = K.sum(y_neg * y_pred_pos)
    fn = K.sum(y_pos * y_pred_neg)

    sp = tn / (fp + tn)
    return sp
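The two custom metrics implement the standard definitions sensitivity = TP/(TP+FN) and specificity = TN/(TN+FP). A plain-numpy sanity check of the same arithmetic, outside the Keras graph (example labels are illustrative):

```python
import numpy as np

y_true = np.array([1, 1, 0, 0, 1])
y_pred = np.array([1, 0, 0, 1, 1])  # already-rounded predictions

tp = np.sum((y_true == 1) & (y_pred == 1))  # 2
tn = np.sum((y_true == 0) & (y_pred == 0))  # 1
fp = np.sum((y_true == 0) & (y_pred == 1))  # 1
fn = np.sum((y_true == 1) & (y_pred == 0))  # 1

print(tp / float(tp + fn))  # sensitivity = 2/3
print(tn / float(fp + tn))  # specificity = 1/2
```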
def get_weights(n_dataset, username):
    weights = 'best_weights_v5_labcrossval_{0}_train_3_{1}.h5'.format(n_dataset, username)
    model = foo()
    model.load_weights(weights)
    print ('weights loaded')
    return model


def get_data(n_dataset):
    f = file('MODS_224_224_{0}_test_3.pkl'.format(n_dataset), 'rb')
    data = cPickle.load(f)
    f.close()

    validation_data = data[0]
    training_data = data[1]

    t_data = training_data[0]
    t_label = training_data[1]
    test_data = validation_data[0]
    test_label = validation_data[1]

    t_data = np.array(t_data)
    t_label = np.array(t_label)
    test_data = np.array(test_data)
    test_label = np.array(test_label)

    t_data = t_data.reshape(t_data.shape[0], 1, 224, 224)
    test_data = test_data.reshape(test_data.shape[0], 1, 224, 224)

    # less precision means less memory needed: 64 -> 32 (half the memory used)
    t_data = t_data.astype('float32')
    test_data = test_data.astype('float32')

    return (t_data, t_label), (test_data, test_label)


def test_net(i):
    model = get_weights(i, username)
    print 'using weights from net trained on dataset {0}'.format(i)
    history = LossAccHistory()

    (X_train, y_train), (X_test, y_test) = get_data(i)

    Y_test = np_utils.to_categorical(y_test, nb_classes)
    X_test /= 255
    print(X_test.shape[0], 'test samples')

    model.compile(loss='binary_crossentropy',
                  optimizer=rmsprop(lr=0.001),  # adadelta
                  metrics=['accuracy', 'matthews_correlation', 'precision', 'recall', sens, spec])

    score = model.evaluate(X_test, Y_test, verbose=1)
    print (model.metrics_names, score)

    if (len(cvscores[0]) == 0):  # if metric names haven't been saved, do so
        cvscores[0].append(model.metrics_names)
    else:
        counter = 1
        for k in score:  # for each test metric, append it to the cvscores list
            cvscores[counter].append(k)
            counter += 1
    model.reset_states()


def cv_calc():
    # calculate mean and stdev for each metric, and append them to test_metrics file
    test_metrics.append(cvscores[0])
    other_counter = 0
    for metric in cvscores[1:]:
        v = 'test {0}: {1:.4f} +/- {2:.4f}%'.format(cvscores[0][0][other_counter], np.mean(metric), np.std(metric))
        print v
        test_metrics.append(v)
        other_counter += 1
        if other_counter == 7:
            other_counter = 0
    return cvscores, test_metrics


def save_metrics(cvscores, test_metrics):
    # save test metrics to txt file
    file = open('MODS_test_metrics_labscrossval.txt', 'w')
    for j in cvscores:
        file.write('\n%s\n' % j)
    for i in test_metrics:
        file.write('\n%s\n' % i)
    file.close()
    print test_metrics


class LossAccHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs={}):
        self.losses = []
        self.accu = []

    def on_batch_end(self, batch, logs={}):
        self.losses.append(logs.get('loss'))
        self.accu.append(logs.get('acc'))


nb_classes = 2
nb_epoch = 100
n_dataset = 5
dropout = 0.5
batch_size = 72
optimizer = 'rmsprop'

test_metrics = []
cvscores = [[], [], [], [], [], [], [], []]
# cvscores = [[metrics],[loss],[acc],[mcc],[precision],[recall],[sens],[spec]]

for i in xrange(n_dataset):
    test_net(i)
    cvscores, test_metrics = cv_calc()
print cvscores, test_metrics
save_metrics(cvscores, test_metrics)
| santiagolopezg/MODS_ConvNet | test_lillabcrossval_network.py | Python | mit | 4,398 | 0.033197 |
# http://codingbat.com/prob/p184853
def big_diff(nums):
    max_num = nums[0]
    min_num = nums[0]
    for num in nums:
        max_num = max(num, max_num)
        min_num = min(num, min_num)
    return abs(max_num - min_num)
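Since `max_num >= min_num` always holds after the loop, the `abs()` is redundant, and the built-ins give a one-line equivalent. Checks using the CodingBat examples:

```python
assert big_diff([10, 3, 5, 6]) == 7
assert big_diff([7, 2, 10, 9]) == 8
assert big_diff([2, 10, 7, 2]) == 8

def big_diff_oneliner(nums):
    return max(nums) - min(nums)
```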
| dvt32/cpp-journey | Python/CodingBat/big_diff.py | Python | mit | 220 | 0.027273 |
"""Public interface managing the workflow for peer assessments.
The Peer Assessment Workflow API exposes all public actions required to complete
the workflow for a given submission.
"""
import logging
from django.db import DatabaseError, IntegrityError, transaction
from django.utils import timezone
from dogapi import dog_stats_api
from openassessment.assessment.errors import (PeerAssessmentInternalError, PeerAssessmentRequestError,
PeerAssessmentWorkflowError)
from openassessment.assessment.models import (Assessment, AssessmentFeedback, AssessmentPart, InvalidRubricSelection,
PeerWorkflow, PeerWorkflowItem)
from openassessment.assessment.serializers import (AssessmentFeedbackSerializer, InvalidRubric, RubricSerializer,
full_assessment_dict, rubric_from_dict, serialize_assessments)
from submissions import api as sub_api
logger = logging.getLogger("openassessment.assessment.api.peer")
PEER_TYPE = "PE"
def submitter_is_finished(submission_uuid, peer_requirements):
"""
Check whether the submitter has made the required number of assessments.
If the requirements dict is None (because we're being updated
asynchronously or when the workflow is first created),
then automatically return False.
Args:
submission_uuid (str): The UUID of the submission being tracked.
peer_requirements (dict): Dictionary with the key "must_grade" indicating
the required number of submissions the student must grade.
Returns:
bool
"""
if peer_requirements is None:
return False
try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
if workflow.completed_at is not None:
return True
elif workflow.num_peers_graded() >= peer_requirements["must_grade"]:
workflow.completed_at = timezone.now()
workflow.save()
return True
return False
except PeerWorkflow.DoesNotExist:
return False
except KeyError:
raise PeerAssessmentRequestError(u'Requirements dict must contain "must_grade" key')
def assessment_is_finished(submission_uuid, peer_requirements):
"""
Check whether the submitter has received enough assessments
to get a score.
If the requirements dict is None (because we're being updated
asynchronously or when the workflow is first created),
then automatically return False.
Args:
submission_uuid (str): The UUID of the submission being tracked.
peer_requirements (dict): Dictionary with the key "must_be_graded_by"
indicating the required number of assessments the student
must receive to get a score.
Returns:
bool
"""
if not peer_requirements:
return False
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow is None:
return False
scored_items = workflow.graded_by.filter(
assessment__submission_uuid=submission_uuid,
assessment__score_type=PEER_TYPE
)
return scored_items.count() >= peer_requirements["must_be_graded_by"]
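# Illustrative check (placeholder UUID; key as consumed above): once three
# peers have assessed the submission,
# assessment_is_finished("abc123", {"must_be_graded_by": 3}) returns True.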
def on_start(submission_uuid):
"""Create a new peer workflow for a student item and submission.
Creates a unique peer workflow for a student item, associated with a
submission.
Args:
submission_uuid (str): The submission associated with this workflow.
Returns:
None
Raises:
SubmissionError: There was an error retrieving the submission.
PeerAssessmentInternalError: Raised when there is an internal error
creating the Workflow.
"""
try:
with transaction.atomic():
submission = sub_api.get_submission_and_student(submission_uuid)
workflow, __ = PeerWorkflow.objects.get_or_create(
student_id=submission['student_item']['student_id'],
course_id=submission['student_item']['course_id'],
item_id=submission['student_item']['item_id'],
submission_uuid=submission_uuid
)
workflow.save()
except IntegrityError:
# If we get an integrity error, it means someone else has already
# created a workflow for this submission, so we don't need to do anything.
pass
except DatabaseError:
error_message = (
u"An internal error occurred while creating a new peer "
u"workflow for submission {}"
.format(submission_uuid)
)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_score(submission_uuid, peer_requirements):
"""
Retrieve a score for a submission if requirements have been satisfied.
Args:
submission_uuid (str): The UUID of the submission.
        peer_requirements (dict): Dictionary with the key "must_be_graded_by"
            indicating the required number of assessments the student
            must receive to get a score.
Returns:
A dictionary with the points earned, points possible, and
contributing_assessments information, along with a None staff_id.
"""
if peer_requirements is None:
return None
    # The submitter hasn't finished grading the required number of peers yet
if not submitter_is_finished(submission_uuid, peer_requirements):
return None
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow is None:
return None
# Retrieve the assessments in ascending order by score date,
# because we want to use the *first* one(s) for the score.
items = workflow.graded_by.filter(
assessment__submission_uuid=submission_uuid,
assessment__score_type=PEER_TYPE
).order_by('-assessment')
submission_finished = items.count() >= peer_requirements["must_be_graded_by"]
if not submission_finished:
return None
    # Unfortunately, we cannot use update() after taking a slice,
    # so we need to update and save the items individually.
# One might be tempted to first query for the first n assessments,
# then select items that have those assessments.
# However, this generates a SQL query with a LIMIT in a subquery,
# which is not supported by some versions of MySQL.
# Although this approach generates more database queries, the number is likely to
# be relatively small (at least 1 and very likely less than 5).
for scored_item in items[:peer_requirements["must_be_graded_by"]]:
scored_item.scored = True
scored_item.save()
assessments = [item.assessment for item in items]
return {
"points_earned": sum(
get_assessment_median_scores(submission_uuid).values()
),
"points_possible": assessments[0].points_possible,
"contributing_assessments": [assessment.id for assessment in assessments],
"staff_id": None,
}
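# Illustrative call (placeholder UUID and values; "must_grade" is consumed by
# submitter_is_finished() and "must_be_graded_by" by the checks above):
# get_score("abc123", {"must_grade": 5, "must_be_graded_by": 3})
# -> {"points_earned": 10, "points_possible": 12,
#     "contributing_assessments": [12, 14, 15], "staff_id": None}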
def create_assessment(
scorer_submission_uuid,
scorer_id,
options_selected,
criterion_feedback,
overall_feedback,
rubric_dict,
num_required_grades,
scored_at=None
):
"""Creates an assessment on the given submission.
Assessments are created based on feedback associated with a particular
rubric.
Args:
scorer_submission_uuid (str): The submission uuid for the Scorer's
workflow. The submission being assessed can be determined via the
peer workflow of the grading student.
scorer_id (str): The user ID for the user giving this assessment. This
is required to create an assessment on a submission.
options_selected (dict): Dictionary mapping criterion names to the
option names the user selected for that criterion.
criterion_feedback (dict): Dictionary mapping criterion names to the
free-form text feedback the user gave for the criterion.
Since criterion feedback is optional, some criteria may not appear
in the dictionary.
overall_feedback (unicode): Free-form text feedback on the submission overall.
num_required_grades (int): The required number of assessments a
submission requires before it is completed. If this number of
assessments is reached, the grading_completed_at timestamp is set
for the Workflow.
Keyword Args:
scored_at (datetime): Optional argument to override the time in which
the assessment took place. If not specified, scored_at is set to
now.
Returns:
dict: the Assessment model, serialized as a dict.
Raises:
PeerAssessmentRequestError: Raised when the submission_id is invalid, or
the assessment_dict does not contain the required values to create
an assessment.
PeerAssessmentInternalError: Raised when there is an internal error
while creating a new assessment.
Examples:
>>> options_selected = {"clarity": "Very clear", "precision": "Somewhat precise"}
>>> criterion_feedback = {"clarity": "I thought this essay was very clear."}
>>> feedback = "Your submission was thrilling."
        >>> create_assessment("1", "Tim", options_selected, criterion_feedback, feedback, rubric_dict, 3)
"""
try:
# Retrieve workflow information
scorer_workflow = PeerWorkflow.objects.get(submission_uuid=scorer_submission_uuid)
peer_workflow_item = scorer_workflow.find_active_assessments()
if peer_workflow_item is None:
message = (
u"There are no open assessments associated with the scorer's "
u"submission UUID {}."
).format(scorer_submission_uuid)
logger.warning(message)
raise PeerAssessmentWorkflowError(message)
peer_submission_uuid = peer_workflow_item.submission_uuid
assessment = _complete_assessment(
rubric_dict,
scorer_id,
peer_submission_uuid,
options_selected,
criterion_feedback,
scorer_workflow,
overall_feedback,
num_required_grades,
scored_at
)
_log_assessment(assessment, scorer_workflow)
return full_assessment_dict(assessment)
except PeerWorkflow.DoesNotExist:
message = (
u"There is no Peer Workflow associated with the given "
u"submission UUID {}."
).format(scorer_submission_uuid)
logger.exception(message)
raise PeerAssessmentWorkflowError(message)
except InvalidRubric:
msg = u"The rubric definition is not valid."
logger.exception(msg)
raise PeerAssessmentRequestError(msg)
except InvalidRubricSelection:
msg = u"Invalid options were selected in the rubric."
logger.warning(msg, exc_info=True)
raise PeerAssessmentRequestError(msg)
except DatabaseError:
error_message = (
u"An error occurred while creating an assessment by the scorer with this ID: {}"
).format(scorer_id)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
@transaction.atomic
def _complete_assessment(
rubric_dict,
scorer_id,
peer_submission_uuid,
options_selected,
criterion_feedback,
scorer_workflow,
overall_feedback,
num_required_grades,
scored_at
):
"""
Internal function for atomic assessment creation. Creates a peer assessment
and closes the associated peer workflow item in a single transaction.
Args:
rubric_dict (dict): The rubric model associated with this assessment
scorer_id (str): The user ID for the user giving this assessment. This
is required to create an assessment on a submission.
peer_submission_uuid (str): The submission uuid for the submission being
assessed.
options_selected (dict): Dictionary mapping criterion names to the
option names the user selected for that criterion.
criterion_feedback (dict): Dictionary mapping criterion names to the
free-form text feedback the user gave for the criterion.
Since criterion feedback is optional, some criteria may not appear
in the dictionary.
scorer_workflow (PeerWorkflow): The PeerWorkflow associated with the
scorer. Updates the workflow item associated with this assessment.
overall_feedback (unicode): Free-form text feedback on the submission overall.
num_required_grades (int): The required number of assessments a
submission requires before it is completed. If this number of
assessments is reached, the grading_completed_at timestamp is set
for the Workflow.
scored_at (datetime): Optional argument to override the time in which
the assessment took place. If not specified, scored_at is set to
now.
Returns:
The Assessment model
"""
# Get or create the rubric
rubric = rubric_from_dict(rubric_dict)
# Create the peer assessment
assessment = Assessment.create(
rubric,
scorer_id,
peer_submission_uuid,
PEER_TYPE,
scored_at=scored_at,
feedback=overall_feedback
)
# Create assessment parts for each criterion in the rubric
# This will raise an `InvalidRubricSelection` if the selected options do not
# match the rubric.
AssessmentPart.create_from_option_names(assessment, options_selected, feedback=criterion_feedback)
# Close the active assessment
scorer_workflow.close_active_assessment(peer_submission_uuid, assessment, num_required_grades)
return assessment
def get_rubric_max_scores(submission_uuid):
"""Gets the maximum possible value for each criterion option
Iterates over the rubric used to grade the given submission, and creates a
dictionary of maximum possible values.
Args:
submission_uuid: The submission to get the associated rubric max scores.
Returns:
A dictionary of max scores for this rubric's criteria options. Returns
None if no assessments are found for this submission.
Raises:
PeerAssessmentInternalError: Raised when there is an error retrieving
the submission, or its associated rubric.
"""
try:
assessments = list(
Assessment.objects.filter(
submission_uuid=submission_uuid
).order_by("-scored_at", "-id").select_related("rubric")[:1]
)
if not assessments:
return None
assessment = assessments[0]
rubric_dict = RubricSerializer.serialized_from_cache(assessment.rubric)
return {
criterion["name"]: criterion["points_possible"]
for criterion in rubric_dict["criteria"]
}
except DatabaseError:
error_message = (
u"Error getting rubric options max scores for submission uuid {uuid}"
).format(uuid=submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_assessment_median_scores(submission_uuid):
"""Get the median score for each rubric criterion
For a given assessment, collect the median score for each criterion on the
rubric. This set can be used to determine the overall score, as well as each
part of the individual rubric scores.
If there is a true median score, it is returned. If there are two median
values, the average of those two values is returned, rounded up to the
greatest integer value.
Args:
submission_uuid (str): The submission uuid is used to get the
assessments used to score this submission, and generate the
appropriate median score.
Returns:
dict: A dictionary of rubric criterion names,
with a median score of the peer assessments.
Raises:
PeerAssessmentInternalError: If any error occurs while retrieving
information to form the median scores, an error is raised.
"""
try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
items = workflow.graded_by.filter(scored=True)
assessments = [item.assessment for item in items]
scores = Assessment.scores_by_criterion(assessments)
return Assessment.get_median_score_dict(scores)
except PeerWorkflow.DoesNotExist:
return {}
except DatabaseError:
error_message = (
u"Error getting assessment median scores for submission {uuid}"
).format(uuid=submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
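# Worked example of the median rule described above: criterion scores
# [2, 3, 4] have a true median of 3, while [2, 3] has two middle values
# whose average 2.5 is rounded up to 3.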
def has_finished_required_evaluating(submission_uuid, required_assessments):
"""Check if a student still needs to evaluate more submissions
Per the contract of the peer assessment workflow, a student must evaluate a
number of peers before receiving feedback on their submission.
Args:
submission_uuid (str): The submission UUID is required to determine if
the associated student has completed enough assessments. This
argument is required.
required_assessments (int): The number of assessments a student has to
submit before receiving the feedback on their submission. This is a
required argument.
Returns:
tuple: True if the student has evaluated enough peer submissions to move
through the peer assessment workflow. False if the student needs to
evaluate more peer submissions. The second value is the count of
assessments completed.
Raises:
PeerAssessmentRequestError: Raised when the submission UUID is invalid,
or the required_assessments is not a positive integer.
PeerAssessmentInternalError: Raised when there is an internal error
while evaluating this workflow rule.
Examples:
>>> has_finished_required_evaluating("abc123", 3)
True, 3
"""
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
done = False
peers_graded = 0
if workflow:
peers_graded = workflow.num_peers_graded()
done = (peers_graded >= required_assessments)
return done, peers_graded
def get_assessments(submission_uuid, limit=None):
"""Retrieve the assessments for a submission.
Retrieves all the assessments for a submissions. This API returns related
feedback without making any assumptions about grading. Any outstanding
assessments associated with this submission will not be returned.
Args:
submission_uuid (str): The submission all the requested assessments are
associated with. Required.
Keyword Arguments:
limit (int): Limit the returned assessments. If None, returns all.
Returns:
list: A list of dictionaries, where each dictionary represents a
separate assessment. Each assessment contains points earned, points
possible, time scored, scorer id, score type, and feedback.
Raises:
PeerAssessmentRequestError: Raised when the submission_id is invalid.
PeerAssessmentInternalError: Raised when there is an internal error
while retrieving the assessments associated with this submission.
Examples:
>>> get_assessments("1", limit=2)
[
{
'points_earned': 6,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 649284 tzinfo=<UTC>),
'scorer': u"Tim",
'feedback': u'Your submission was thrilling.'
},
{
'points_earned': 11,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 31, 14, 10, 17, 544214 tzinfo=<UTC>),
'scorer': u"Bob",
'feedback': u'Great submission.'
}
]
"""
try:
assessments = Assessment.objects.filter(
submission_uuid=submission_uuid,
score_type=PEER_TYPE
)[:limit]
return serialize_assessments(assessments)
except DatabaseError:
error_message = (
u"Error getting assessments for submission {uuid}"
).format(uuid=submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_submitted_assessments(submission_uuid, limit=None):
"""Retrieve the assessments created by the given submission's author.
Retrieves all the assessments created by the given submission's author. This
API returns related feedback without making any assumptions about grading.
Any outstanding assessments associated with this submission will not be
returned.
Args:
submission_uuid (str): The submission of the student whose assessments
we are requesting. Required.
Keyword Arguments:
limit (int): Limit the returned assessments. If None, returns all.
Returns:
list(dict): A list of dictionaries, where each dictionary represents a
separate assessment. Each assessment contains points earned, points
possible, time scored, scorer id, score type, and feedback. If no
workflow is found associated with the given submission_uuid, returns
an empty list.
Raises:
PeerAssessmentRequestError: Raised when the submission_id is invalid.
PeerAssessmentInternalError: Raised when there is an internal error
while retrieving the assessments associated with this submission.
Examples:
>>> get_submitted_assessments("1", limit=2)
[
{
'points_earned': 6,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 649284 tzinfo=<UTC>),
'scorer': u"Tim",
'feedback': u'Your submission was thrilling.'
},
{
'points_earned': 11,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 31, 14, 10, 17, 544214 tzinfo=<UTC>),
'scorer': u"Tim",
'feedback': u'Great submission.'
}
]
"""
try:
# If no workflow is found associated with the uuid, this returns None,
# and an empty set of assessments will be returned.
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
items = PeerWorkflowItem.objects.filter(
scorer=workflow,
assessment__isnull=False
)
assessments = Assessment.objects.filter(
pk__in=[item.assessment.pk for item in items])[:limit]
return serialize_assessments(assessments)
except DatabaseError:
        error_message = (
            u"Couldn't retrieve the assessments completed by the "
            u"student with submission {uuid}"
        ).format(uuid=submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def get_submission_to_assess(submission_uuid, graded_by):
"""Get a submission to peer evaluate.
Retrieves a submission for assessment for the given student. This will
not return a submission submitted by the requesting scorer. Submissions are
returned based on how many assessments are still required, and if there are
peers actively assessing a particular submission. If there are no
submissions requiring assessment, a submission may be returned that will be
'over graded', and the assessment will not be counted towards the overall
grade.
Args:
submission_uuid (str): The submission UUID from the student
requesting a submission for assessment. This is used to explicitly
avoid giving the student their own submission, and determines the
associated Peer Workflow.
graded_by (int): The number of assessments a submission
requires before it has completed the peer assessment process.
Returns:
dict: A peer submission for assessment. This contains a 'student_item',
'attempt_number', 'submitted_at', 'created_at', and 'answer' field to be
used for assessment.
Raises:
PeerAssessmentRequestError: Raised when the request parameters are
invalid for the request.
PeerAssessmentInternalError: Raised when there is an internal error
retrieving peer workflow information.
PeerAssessmentWorkflowError: Raised when an error occurs because this
function, or the student item, is not in the proper workflow state
to retrieve a peer submission.
Examples:
>>> get_submission_to_assess("abc123", 3)
{
'student_item': 2,
'attempt_number': 1,
'submitted_at': datetime.datetime(2014, 1, 29, 23, 14, 52, 649284, tzinfo=<UTC>),
'created_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 668850, tzinfo=<UTC>),
'answer': u'The answer is 42.'
}
"""
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if not workflow:
raise PeerAssessmentWorkflowError(
u"A Peer Assessment Workflow does not exist for the student "
u"with submission UUID {}".format(submission_uuid)
)
if workflow.is_cancelled:
return None
open_item = workflow.find_active_assessments()
peer_submission_uuid = open_item.submission_uuid if open_item else None
    # If there is an active assessment for this user, get that submission;
    # otherwise, get the first assessment for review; otherwise,
    # get the first submission available for over-grading.
if peer_submission_uuid is None:
peer_submission_uuid = workflow.get_submission_for_review(graded_by)
if peer_submission_uuid is None:
peer_submission_uuid = workflow.get_submission_for_over_grading()
if peer_submission_uuid:
try:
submission_data = sub_api.get_submission(peer_submission_uuid)
PeerWorkflow.create_item(workflow, peer_submission_uuid)
_log_workflow(peer_submission_uuid, workflow)
return submission_data
except sub_api.SubmissionNotFoundError:
error_message = (
u"Could not find a submission with the uuid {} for student {} "
u"in the peer workflow."
).format(peer_submission_uuid, workflow.student_id)
logger.exception(error_message)
raise PeerAssessmentWorkflowError(error_message)
else:
logger.info(
u"No submission found for {} to assess ({}, {})"
.format(
workflow.student_id,
workflow.course_id,
workflow.item_id,
)
)
return None
def create_peer_workflow(submission_uuid):
"""Create a new peer workflow for a student item and submission.
Creates a unique peer workflow for a student item, associated with a
submission.
Args:
submission_uuid (str): The submission associated with this workflow.
Returns:
None
Raises:
SubmissionError: There was an error retrieving the submission.
PeerAssessmentInternalError: Raised when there is an internal error
creating the Workflow.
Examples:
>>> create_peer_workflow("1")
"""
try:
with transaction.atomic():
submission = sub_api.get_submission_and_student(submission_uuid)
workflow, __ = PeerWorkflow.objects.get_or_create(
student_id=submission['student_item']['student_id'],
course_id=submission['student_item']['course_id'],
item_id=submission['student_item']['item_id'],
submission_uuid=submission_uuid
)
workflow.save()
except IntegrityError:
# If we get an integrity error, it means someone else has already
# created a workflow for this submission, so we don't need to do anything.
pass
except DatabaseError:
error_message = (
u"An internal error occurred while creating a new peer "
u"workflow for submission {}"
).format(submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def create_peer_workflow_item(scorer_submission_uuid, submission_uuid):
"""
Begin peer-assessing a particular submission.
Note that this does NOT pick the submission from the prioritized list of available submissions.
Mainly useful for testing.
Args:
scorer_submission_uuid (str): The ID of the scoring student.
submission_uuid (str): The unique identifier of the submission being scored
Returns:
None
Raises:
PeerAssessmentWorkflowError: Could not find the workflow for the student.
PeerAssessmentInternalError: Could not create the peer workflow item.
"""
workflow = PeerWorkflow.get_by_submission_uuid(scorer_submission_uuid)
PeerWorkflow.create_item(workflow, submission_uuid)
def get_assessment_feedback(submission_uuid):
"""
Retrieve a feedback on an assessment.
Args:
submission_uuid: The submission we want to retrieve assessment feedback for.
Returns:
dict or None
Raises:
PeerAssessmentInternalError: Error occurred while retrieving the feedback.
"""
try:
feedback = AssessmentFeedback.objects.get(
submission_uuid=submission_uuid
)
return AssessmentFeedbackSerializer(feedback).data
except AssessmentFeedback.DoesNotExist:
return None
except DatabaseError:
error_message = (
u"An error occurred retrieving assessment feedback for {}."
.format(submission_uuid)
)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
def set_assessment_feedback(feedback_dict):
"""
Set a feedback object for an assessment to have some new values.
Sets or updates the assessment feedback with the given values in the dict.
Args:
feedback_dict (dict): A dictionary of all the values to update or create
a new assessment feedback.
Returns:
None
Raises:
PeerAssessmentRequestError
PeerAssessmentInternalError
"""
submission_uuid = feedback_dict.get('submission_uuid')
feedback_text = feedback_dict.get('feedback_text')
selected_options = feedback_dict.get('options', list())
if feedback_text and len(feedback_text) > AssessmentFeedback.MAXSIZE:
error_message = u"Assessment feedback too large."
raise PeerAssessmentRequestError(error_message)
try:
# Get or create the assessment model for this submission
# If we receive an integrity error, assume that someone else is trying to create
# another feedback model for this submission, and raise an exception.
if submission_uuid:
feedback, created = AssessmentFeedback.objects.get_or_create(submission_uuid=submission_uuid)
else:
error_message = u"An error occurred creating assessment feedback: bad or missing submission_uuid."
logger.error(error_message)
raise PeerAssessmentRequestError(error_message)
# Update the feedback text
if feedback_text is not None:
feedback.feedback_text = feedback_text
# Save the feedback model. We need to do this before setting m2m relations.
if created or feedback_text is not None:
feedback.save()
# Associate the feedback with selected options
feedback.add_options(selected_options)
# Associate the feedback with scored assessments
assessments = PeerWorkflowItem.get_scored_assessments(submission_uuid)
feedback.assessments.add(*assessments)
except DatabaseError:
msg = u"Error occurred while creating or updating feedback on assessment: {}".format(feedback_dict)
logger.exception(msg)
raise PeerAssessmentInternalError(msg)
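# Illustrative feedback_dict (keys as read above; the UUID and option text
# are placeholders):
# set_assessment_feedback({
#     "submission_uuid": "abc123",
#     "feedback_text": u"These assessments seemed fair.",
#     "options": [u"I disagree with one or more of the peer assessments"],
# })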
def _log_assessment(assessment, scorer_workflow):
"""
Log the creation of a peer assessment.
Args:
assessment (Assessment): The assessment model that was created.
scorer_workflow (dict): A dictionary representation of the Workflow
belonging to the scorer of this assessment.
Returns:
None
"""
logger.info(
u"Created peer-assessment {assessment_id} for submission "
u"{submission_uuid}, course {course_id}, item {item_id} "
u"with rubric {rubric_content_hash}; scored by {scorer}"
.format(
assessment_id=assessment.id,
submission_uuid=assessment.submission_uuid,
course_id=scorer_workflow.course_id,
item_id=scorer_workflow.item_id,
rubric_content_hash=assessment.rubric.content_hash,
scorer=scorer_workflow.student_id,
)
)
tags = [
u"course_id:{course_id}".format(course_id=scorer_workflow.course_id),
u"item_id:{item_id}".format(item_id=scorer_workflow.item_id),
u"type:peer",
]
score_percentage = assessment.to_float()
if score_percentage is not None:
dog_stats_api.histogram('openassessment.assessment.score_percentage', score_percentage, tags=tags)
# Calculate the time spent assessing
# This is the time from when the scorer retrieved the submission
# (created the peer workflow item) to when they completed an assessment.
# By this point, the assessment *should* have an associated peer workflow item,
# but if not, we simply skip the event.
try:
workflow_item = assessment.peerworkflowitem_set.get()
except (
PeerWorkflowItem.DoesNotExist,
PeerWorkflowItem.MultipleObjectsReturned,
DatabaseError
):
msg = u"Could not retrieve peer workflow item for assessment: {assessment}".format(
assessment=assessment.id
)
logger.exception(msg)
workflow_item = None
if workflow_item is not None:
time_delta = assessment.scored_at - workflow_item.started_at
dog_stats_api.histogram(
'openassessment.assessment.seconds_spent_assessing',
time_delta.total_seconds(),
tags=tags
)
dog_stats_api.increment('openassessment.assessment.count', tags=tags)
def _log_workflow(submission_uuid, workflow):
"""
Log the creation of a peer-assessment workflow.
Args:
submission_uuid (str): The UUID of the submission being assessed.
workflow (PeerWorkflow): The Peer Workflow of the student making the
assessment.
"""
logger.info(
u"Retrieved submission {} ({}, {}) to be assessed by {}"
.format(
submission_uuid,
workflow.course_id,
workflow.item_id,
workflow.student_id,
)
)
tags = [
u"course_id:{course_id}".format(course_id=workflow.course_id),
u"item_id:{item_id}".format(item_id=workflow.item_id),
u"type:peer"
]
# Over-grading is always turned on
# Keep this tag for backwards-compatibility
tags.append(u"overgrading")
dog_stats_api.increment('openassessment.assessment.peer_workflow.count', tags=tags)
def is_workflow_cancelled(submission_uuid):
"""
Check if workflow submission is cancelled.
Args:
submission_uuid (str): The UUID of the workflow's submission.
Returns:
True/False
"""
if submission_uuid is None:
return False
try:
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
return workflow.is_cancelled if workflow else False
except PeerAssessmentWorkflowError:
return False
def on_cancel(submission_uuid):
"""Cancel the peer workflow for submission.
Sets the cancelled_at field in peer workflow.
Args:
submission_uuid (str): The submission UUID associated with this workflow.
Returns:
None
"""
try:
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow:
workflow.cancelled_at = timezone.now()
workflow.save()
except (PeerAssessmentWorkflowError, DatabaseError):
        error_message = (
            u"An internal error occurred while cancelling the peer "
            u"workflow for submission {}"
            .format(submission_uuid)
        )
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
| Edraak/edx-ora2 | openassessment/assessment/api/peer.py | Python | agpl-3.0 | 37,163 | 0.001453 |
import datetime
from dateutil.relativedelta import *
## give final date and time after parsing by changing current date-time
def change_datetime ( c="0", y=0, mt=0, w=0, d=0, h=0, m=0, s=0):
#mt = mt + 12*y
#d = d + 30*mt
now = datetime.datetime.now()
change = relativedelta( years =+ y, months =+ mt, weeks =+ w, days =+ d, hours =+ h, minutes =+ m, seconds =+ s)
#print (now + change)
if c == "date":
return (now + change).date()
elif c == "time":
return (now + change).time()
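## example (assuming "now" is 1 Jan 2016 12:00 exactly):
## change_datetime("date", d=10) -> datetime.date(2016, 1, 11)
## change_datetime("time", h=2) -> datetime.time(14, 0) (plus any microseconds)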
## make separate date and time functions
#def change_date (y=0, m=0, w=0, d=0):
#def change_time (h=0, m=0, s=0):
## TODO: make separate functions for setting date and time, and handle printing when no data is provided
## give final date and time after parsing by setting date-time
def set_datetime (y=0, mt=0, d=0, h=0, m=0, s=0, c="0"):
    a = ""
    if d!=0:
        a = a + str(d) + "/"
    if mt!=0:
        a = a + str(mt) + "/"
    if y!=0:
        a = a + str(y)
    if h!=0:
        if a != "":
            a = a + " " #separate the date part from the time part
        a = a + str(h)
    if m!=0:
        a = a + ":" + str(m)
    if s!=0:
        a = a + ":" + str(s)
    if c!="0":
        a = a + " " + str(c)
    #print (a, "a")
    return a
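## example with the formatting above (placeholder values):
## set_datetime(y=2016, mt=1, d=5, h=10, m=30, c="am") -> "5/1/2016 10:30 am"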
## make function for am/pm
def get_disease (string):
with open("dataset.txt") as f:
content = f.readlines()
names = []
definitions = []
values = []
check = 1
## TODO
## remove the common words from defintion (or input) (or use replace) like a, the,disease, etc. while splitting definition in words
## Also do stemming
## Go through dataset once manually to get these words
for word in content:
        if word[0] == 'n':
            ## TODO think better way in which pop is not required, directly append only if required
            if check == 1:
                names.append(word)
                check = 0
            else: #a later name line before any definition replaces the previous one
                names.pop()
                names.append(word)
if word[0] == 'd':
definitions.append(word)
check = 1
values.append(0)
#string = input("Give Text:")
words = string.split(" ")
    for word in words:
        for defintion in definitions:
            #str.replace returns a new string, so keep the cleaned copy
            cleaned = defintion.replace('. ', ' ').replace(', ', ' ')
            definition_words = cleaned.split(" ")
            if word in definition_words:
                values[definitions.index(defintion)] += 1
                #print (word)
highest = 0
index_of_highest = 0
answer = []
## TODO if there are more than one highest
for value in values:
if value > highest:
highest = value
index_of_highest = values.index(value)
answer.append(names[index_of_highest])
answer.append(highest)
answer.append(definitions[index_of_highest])
    for word in words:
        newd = definitions[index_of_highest].replace('. ', ' ')
        newda = newd.replace(', ', ' ')
        definition_words = newda.split(" ")
        ## cannot pass more than one separator to split, find better way
        #print (definition_words)
        if word in definition_words:
            answer.append(word) #collect the input words that matched the winning definition
        # print (definitions[index_of_highest][defintion.index(word)])
## make definition sort only usable things
## find a way like , and parameters for passing more than value in relplace
return answer
def get_sentences(str):
import re
## use of regular expressions
## str cannot be changed further, always make a new object
words = str.split(" ")
Abbrs = ['Mr.', 'mr.', 'Mrs.', 'mrs.', 'Dr.', 'dr.' , 'Er.', 'er.', 'Prof.', 'prof.', 'Br.', 'br.', 'Fr.', 'fr.', 'Sr.', 'sr.', 'Jr.', 'jr.']
SentenceType = []
for abbr in Abbrs:
if abbr in words:
new_word = abbr.replace(abbr[len(abbr)-1], "")
str = str.replace(abbr, new_word)
#print (new_str)
## str.replace(abbr[len(abbr)-1], " ")
## Do directly in string without using words
for word in words:
if re.findall(r'\.(.)+\.', word):
new_word = word.replace('.','')
str = str.replace(word, new_word)
#print (word)
#print (new_word)
#print (new_str2)
if '.' in word[0:len(word)-2]:
new_word = word.replace('.', '[dot]')
str = str.replace(word, new_word)
for letter in str:
if letter == '.':
SentenceType.append("Assertive")
if letter == '?':
SentenceType.append("Interrogative")
if letter == '!' or letter == '!!':
SentenceType.append('Exclamatory')
    sentences = re.split(r"[ ]*[.?!]+[ ]*", str)
if (str[len(str)-1] == '.') or (str[len(str)-1] == '?') or (str[len(str)-1] == '!'):
sentences.pop()
return dict(zip(sentences, SentenceType))
## TODOs
## Extend Abbrs list
## Dots back in sentences
## If abbr of acronyms with dots at end of a sentence?
## what if sentence doesn't end with !!? Get the expression from this word.
## If already a new line exist.
## Also implement through machine learning to obtain results without help of punctuation.
## Sentence Type : What about Imperative, compound, complex etc. Exclamatory Sentence or Word
## ensure sentences are returned sequentially
def get_tokens(str):
words = str.split(" ")
return words
## Make an algorithm for different kind of words for forming effective tokens before returning
| Chirayu-sopho/Hindi-DateTime-Parser | functions.py | Python | mit | 4,957 | 0.047004 |
from cocosCairo.cocosCairo import * # Convenience module to import all other modules
from splash import *
BACKGROUND_COLOR = Color(0.1, 0.3, 0.7)
MAZE_PATHS = ["maze01.maze", "maze02.maze", "maze03.maze"] # an ordered list of the maze files
PATH_INDEX = 0 # the index of the next maze file to load
class MazeScene(Scene):
def __init__(self, modelPath):
Scene.__init__(self)
self._modelPath = modelPath
def setup(self):
self.setBackgroundColor(BACKGROUND_COLOR)
def onEnterFromFinishedTransition(self):
Scene.onEnterFromFinishedTransition(self)
self._mazePathController = MazePathController(self._modelPath)
self.addController(self._mazePathController)
x = self.getSize().width/2
y = self.getSize().height/2
self._mazePathController.getNode().setPosition(Point(x,y))
self._mazePathController.getNode().setOpacity(0.0)
action = EaseSineInOut(FadeIn(1.0))
cbAction = CallbackInstantAction(self._onFadeInCompletion)
sequence = Sequence(action, cbAction)
self._mazePathController.getNode().runAction(sequence)
def _onFadeInCompletion(self):
self._mazePathController.getNode().showPieces()
class MazePathModel(AbstractModel):
def __init__(self, filepath):
AbstractModel.__init__(self)
self._modelArray = []
self._playerLocation = [0,0]
self._goalLocation = [0,0]
self._moveCount = 0
f = open(filepath)
# populate the model array
for line in f:
line = line.strip()
            if len(line) < 1 or line[0] == "#" or line[:2] == "//": # if the line is a comment or empty
                continue # then move on to the next line
row = line.split(',')
row = [int(x[:1]) for x in row if (len(x) > 0 and x != '\n')] # trim and convert to int
self._modelArray.append(row)
# look for special characters
for i in range(0, len(self._modelArray[0])):
for j in range(0, len(self._modelArray)):
                if self._modelArray[j][i] == 2:
                    self._playerLocation = [i, j]
                    self._modelArray[j][i] = 1
                elif self._modelArray[j][i] == 3:
                    self._goalLocation = [i, j]
                    self._modelArray[j][i] = 1
f.close()
self.didChange()
def getModelArray(self):
return self._modelArray
def getPlayerLocation(self):
return self._playerLocation
def getGoalLocation(self):
return self._goalLocation
def getMoveCount(self):
return self._moveCount
def movePlayerLocation(self, direction):
self._moveCount += 1
row = self._playerLocation[1]
col = self._playerLocation[0]
if direction == "left":
if col-1 < 0 or self._modelArray[row][col-1] != 1:
return
else:
self._playerLocation = [col-1, row]
self.didChange()
elif direction == "right":
if col+1 >= len(self._modelArray[0]) or self._modelArray[row][col+1] != 1:
return
else:
self._playerLocation = [col+1, row]
self.didChange()
elif direction == "up":
if row-1 < 0 or self._modelArray[row-1][col] != 1:
return
else:
self._playerLocation = [col, row-1]
self.didChange()
elif direction == "down":
if row+1 >= len(self._modelArray) or self._modelArray[row+1][col] != 1:
return
else:
self._playerLocation = [col, row+1]
self.didChange()
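# Illustrative .maze file accepted by the parser above (an assumed example,
# not one of the bundled mazes): 0 = wall, 1 = open path, 2 = player start,
# 3 = goal; lines starting with '#' or '//' are comments.
#
# # tiny demo maze
# 2,1,0
# 0,1,0
# 0,1,3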
class MazePathNode(Node):
def __init__(self, rect=None):
Node.__init__(self, rect)
self._hasRenderedTiles = False
self._hasFinishedActions = False
self._player = None
self._goal = None
self._tileSize = 50
self.setAnchorPoint(Point(0.5, 0.5))
def setOpacity(self, opacity):
Node.setOpacity(self, opacity)
for child in self.getChildren():
child.setOpacity(opacity)
def onModelChange(self, model):
if not model:
return
# render the tiles
if not self._hasRenderedTiles:
self._hasRenderedTiles = True
modelArray = model.getModelArray()
width = self._tileSize * len(modelArray[0])
height = self._tileSize * len(modelArray)
self.setSize(Size(width, height))
for i in range(0, len(modelArray[0])):
for j in range(0, len(modelArray)):
x = i*self._tileSize
y = j*self._tileSize
w = self._tileSize
h = self._tileSize
rect = MakeRect(x, y, w, h)
                    if modelArray[j][i] == 0: # 'matrix' lookup is [row,col], i.e. (y,x) instead of (x,y), so the i,j indices are switched
                        continue
else:
color = WhiteColor()
rectangle = RectangleNode(rect, color)
self.addChild(rectangle, 1)
# set up the player's sprite
x = model.getPlayerLocation()[0] * self._tileSize
y = model.getPlayerLocation()[1] * self._tileSize
if not self._player:
self._player = Sprite("images/character.png", Point(x,y))
self.addChild(self._player,3)
self._player.setScale(0.01)
self._player.setAnchorPoint(Point(0.5,0.5))
size = self._player.getSize().width
self._player.setPosition(pointAdd(self._player.getPosition(), Point(size/2, size/2)))
else:
self._hasFinishedActions = False
action = EaseSineInOut(MoveTo(0.05, Point(x,y)))
cbAction = CallbackInstantAction(self.onPlayerMotionCompletion)
sequence = Sequence(action, cbAction)
self._player.runAction(sequence)
# set up the goal sprite
x = model.getGoalLocation()[0] * self._tileSize
y = model.getGoalLocation()[1] * self._tileSize
if not self._goal:
self._goal = Sprite("images/goal.png", Point(x,y))
self.addChild(self._goal,2)
self._goal.setScale(0.01)
self._goal.setAnchorPoint(Point(0.5,0.5))
size = self._goal.getSize().width
self._goal.setPosition(pointAdd(self._goal.getPosition(), Point(size/2, size/2)))
else:
self._goal.setPosition(Point(x,y))
def showPieces(self):
if self._goal:
action = EaseBounceOut(ScaleTo(0.75, 1.0))
sequence = Sequence(action, CallbackInstantAction(self.onGoalScaleCompletion))
self._goal.runAction(sequence)
def onGoalScaleCompletion(self):
self._goal.setAnchorPoint(PointZero())
size = self._goal.getSize().width
self._goal.setPosition(pointSub(self._goal.getPosition(), Point(size/2, size/2)))
if self._player:
action = EaseBounceOut(ScaleTo(0.75, 1.0))
sequence = Sequence(action, CallbackInstantAction(self.onPlayerScaleCompletion))
self._player.runAction(sequence)
def onPlayerScaleCompletion(self):
self._player.setAnchorPoint(PointZero())
size = self._player.getSize().width
self._player.setPosition(pointSub(self._player.getPosition(), Point(size/2, size/2)))
self._hasFinishedActions = True
def onPlayerMotionCompletion(self):
self._hasFinishedActions = True
def reset(self):
self._hasRenderedTiles = False
self._hasFinishedActions = False
self.removeAllChildren()
self._player = None
self._goal = None
def hasFinishedActions(self):
return self._hasFinishedActions
class MazePathController(AbstractController):
def __init__(self, modelPath):
AbstractController.__init__(self, MazePathNode(RectZero()), MazePathModel(modelPath))
def onKeyPress(self, event):
if not self.getNode().hasFinishedActions():
return
key = event.key
if key == "Left":
self.getModel().movePlayerLocation("left")
elif key == "Right":
self.getModel().movePlayerLocation("right")
elif key == "Up":
self.getModel().movePlayerLocation("up")
elif key == "Down":
self.getModel().movePlayerLocation("down")
if self.getModel().getPlayerLocation() == self.getModel().getGoalLocation():
winScene = WinScene(self.getModel().getMoveCount())
transition = MoveInTopTransition(.5, winScene)
self.getDirector().replaceScene(transition)
return True
class WinScene(Scene, GestureListener):
def __init__(self, moveCount):
Scene.__init__(self)
self._currentCount = 0
self._moveCount = moveCount
def setup(self):
self.setBackgroundColor(WhiteColor())
self._label = PangoLabel()
self.setMarkupText(0)
self._label.setAnchorPoint(Point(0.5, 0.5))
self._label.setAlignment("center")
self._label.setFontSize(48)
self.addChild(self._label)
def onEnter(self):
Scene.onEnter(self)
self.getDirector().getGestureDispatch().addListener(self)
x = self.getSize().width/2
y = self.getSize().height/2
self._label.setPosition(Point(x,y))
def onEnterFromFinishedTransition(self):
Scene.onEnterFromFinishedTransition(self)
self.scheduleCallback(self._updateCount, 0.005)
def onExit(self):
Scene.onExit(self)
self.getDirector().getGestureDispatch().removeListener(self)
def _updateCount(self, dt):
self._currentCount += 1
self.setMarkupText(self._currentCount)
if self._currentCount >= self._moveCount:
self.unscheduleCallback(self._updateCount)
def setMarkupText(self, count):
if count < 10:
countString = "0"+str(count)
else:
countString = str(count)
markupText = '<span foreground="#000000" size="xx-large">You win!</span>' + \
'<span size="xx-small">\n\n</span>' + \
'<span foreground="#003399">You took\n' + \
'<span size="xx-large">' + countString + \
' moves\n</span>to complete the maze!</span>'
self._label.setMarkupText(markupText)
def onKeyPress(self, event):
self._onEvent()
return True
def onMousePress(self, event):
self._onEvent()
return True
def _onEvent(self):
global PATH_INDEX
global MAZE_PATHS
if PATH_INDEX < len(MAZE_PATHS):
path = MAZE_PATHS[PATH_INDEX]
PATH_INDEX += 1
transition = RotoZoomTransition(1.0, MazeScene(path))
self.getDirector().replaceScene(transition)
else:
PATH_INDEX = 0 # for right now, just loop through the mazes
self._onEvent()
if __name__ == "__main__":
director = Director()
director.setWindow()
path = MAZE_PATHS[PATH_INDEX]
PATH_INDEX += 1
transition = MoveInTopTransition(1.0, MazeScene(path))
director.runWithScene(SplashScene(transition))
#director.runWithScene(MazeScene("maze02.txt"))
| jeremyflores/cocosCairo | oldTests/maze.py | Python | mit | 9,600 | 0.030417 |
#!/usr/bin/python
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A chain with four possible intermediates with different notBefore and notAfter
dates, for testing path building prioritization.
"""
import sys
sys.path += ['../..']
import gencerts
DATE_A = '150101120000Z'
DATE_B = '150102120000Z'
DATE_C = '180101120000Z'
DATE_D = '180102120000Z'
root = gencerts.create_self_signed_root_certificate('Root')
root.set_validity_range(DATE_A, DATE_D)
int_ac = gencerts.create_intermediate_certificate('Intermediate', root)
int_ac.set_validity_range(DATE_A, DATE_C)
int_ad = gencerts.create_intermediate_certificate('Intermediate', root)
int_ad.set_validity_range(DATE_A, DATE_D)
int_ad.set_key(int_ac.get_key())
int_bc = gencerts.create_intermediate_certificate('Intermediate', root)
int_bc.set_validity_range(DATE_B, DATE_C)
int_bc.set_key(int_ac.get_key())
int_bd = gencerts.create_intermediate_certificate('Intermediate', root)
int_bd.set_validity_range(DATE_B, DATE_D)
int_bd.set_key(int_ac.get_key())
target = gencerts.create_end_entity_certificate('Target', int_ac)
target.set_validity_range(DATE_A, DATE_D)
gencerts.write_chain('The root', [root], out_pem='root.pem')
gencerts.write_chain('Intermediate with validity range A..C',
[int_ac], out_pem='int_ac.pem')
gencerts.write_chain('Intermediate with validity range A..D',
[int_ad], out_pem='int_ad.pem')
gencerts.write_chain('Intermediate with validity range B..C',
[int_bc], out_pem='int_bc.pem')
gencerts.write_chain('Intermediate with validity range B..D',
[int_bd], out_pem='int_bd.pem')
gencerts.write_chain('The target', [target], out_pem='target.pem')
| endlessm/chromium-browser | net/data/path_builder_unittest/validity_date_prioritization/generate-certs.py | Python | bsd-3-clause | 1,833 | 0.001091 |
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class IpAccessControlListMappingList(ListResource):
def __init__(self, version, account_sid, domain_sid):
"""
Initialize the IpAccessControlListMappingList
:param Version version: Version that contains the resource
:param account_sid: The unique id of the Account that is responsible for this resource.
:param domain_sid: The unique string that identifies the SipDomain resource.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingList
"""
super(IpAccessControlListMappingList, self).__init__(version)
# Path Solution
self._solution = {'account_sid': account_sid, 'domain_sid': domain_sid, }
self._uri = '/Accounts/{account_sid}/SIP/Domains/{domain_sid}/IpAccessControlListMappings.json'.format(**self._solution)
def create(self, ip_access_control_list_sid):
"""
Create the IpAccessControlListMappingInstance
:param unicode ip_access_control_list_sid: The unique id of the IP access control list to map to the SIP domain
:returns: The created IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
data = values.of({'IpAccessControlListSid': ip_access_control_list_sid, })
payload = self._version.create(method='POST', uri=self._uri, data=data, )
return IpAccessControlListMappingInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
)
def stream(self, limit=None, page_size=None):
"""
Streams IpAccessControlListMappingInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(page_size=limits['page_size'], )
return self._version.stream(page, limits['limit'])
def list(self, limit=None, page_size=None):
"""
Lists IpAccessControlListMappingInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance]
"""
return list(self.stream(limit=limit, page_size=page_size, ))
def page(self, page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of IpAccessControlListMappingInstance records from the API.
Request is executed immediately
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
"""
data = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
response = self._version.page(method='GET', uri=self._uri, params=data, )
return IpAccessControlListMappingPage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of IpAccessControlListMappingInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return IpAccessControlListMappingPage(self._version, response, self._solution)
def get(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
)
def __call__(self, sid):
"""
Constructs a IpAccessControlListMappingContext
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
return IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=sid,
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.IpAccessControlListMappingList>'
class IpAccessControlListMappingPage(Page):
def __init__(self, version, response, solution):
"""
Initialize the IpAccessControlListMappingPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The unique id of the Account that is responsible for this resource.
:param domain_sid: The unique string that identifies the SipDomain resource.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingPage
"""
super(IpAccessControlListMappingPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of IpAccessControlListMappingInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
return IpAccessControlListMappingInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
)
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.IpAccessControlListMappingPage>'
class IpAccessControlListMappingContext(InstanceContext):
def __init__(self, version, account_sid, domain_sid, sid):
"""
Initialize the IpAccessControlListMappingContext
:param Version version: Version that contains the resource
:param account_sid: The unique id of the Account that is responsible for this resource.
:param domain_sid: A string that uniquely identifies the SIP Domain
:param sid: A 34 character string that uniquely identifies the resource to fetch.
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
super(IpAccessControlListMappingContext, self).__init__(version)
# Path Solution
self._solution = {'account_sid': account_sid, 'domain_sid': domain_sid, 'sid': sid, }
self._uri = '/Accounts/{account_sid}/SIP/Domains/{domain_sid}/IpAccessControlListMappings/{sid}.json'.format(**self._solution)
def fetch(self):
"""
Fetch the IpAccessControlListMappingInstance
:returns: The fetched IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return IpAccessControlListMappingInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=self._solution['sid'],
)
def delete(self):
"""
Deletes the IpAccessControlListMappingInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._version.delete(method='DELETE', uri=self._uri, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.IpAccessControlListMappingContext {}>'.format(context)
class IpAccessControlListMappingInstance(InstanceResource):
def __init__(self, version, payload, account_sid, domain_sid, sid=None):
"""
Initialize the IpAccessControlListMappingInstance
:returns: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
super(IpAccessControlListMappingInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'date_created': deserialize.rfc2822_datetime(payload.get('date_created')),
'date_updated': deserialize.rfc2822_datetime(payload.get('date_updated')),
'domain_sid': payload.get('domain_sid'),
'friendly_name': payload.get('friendly_name'),
'sid': payload.get('sid'),
'uri': payload.get('uri'),
}
# Context
self._context = None
self._solution = {
'account_sid': account_sid,
'domain_sid': domain_sid,
'sid': sid or self._properties['sid'],
}
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: IpAccessControlListMappingContext for this IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingContext
"""
if self._context is None:
self._context = IpAccessControlListMappingContext(
self._version,
account_sid=self._solution['account_sid'],
domain_sid=self._solution['domain_sid'],
sid=self._solution['sid'],
)
return self._context
@property
def account_sid(self):
"""
:returns: The unique id of the Account that is responsible for this resource.
:rtype: unicode
"""
return self._properties['account_sid']
@property
def date_created(self):
"""
:returns: The date that this resource was created, given as GMT in RFC 2822 format.
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The date that this resource was last updated, given as GMT in RFC 2822 format.
:rtype: datetime
"""
return self._properties['date_updated']
@property
def domain_sid(self):
"""
:returns: The unique string that identifies the SipDomain resource.
:rtype: unicode
"""
return self._properties['domain_sid']
@property
def friendly_name(self):
"""
:returns: A human readable descriptive text for this resource, up to 64 characters long.
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def sid(self):
"""
:returns: A 34 character string that uniquely identifies this resource.
:rtype: unicode
"""
return self._properties['sid']
@property
def uri(self):
"""
:returns: The URI for this resource, relative to https://api.twilio.com
:rtype: unicode
"""
return self._properties['uri']
def fetch(self):
"""
Fetch the IpAccessControlListMappingInstance
:returns: The fetched IpAccessControlListMappingInstance
:rtype: twilio.rest.api.v2010.account.sip.domain.ip_access_control_list_mapping.IpAccessControlListMappingInstance
"""
return self._proxy.fetch()
def delete(self):
"""
Deletes the IpAccessControlListMappingInstance
:returns: True if delete succeeds, False otherwise
:rtype: bool
"""
return self._proxy.delete()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Api.V2010.IpAccessControlListMappingInstance {}>'.format(context)
| twilio/twilio-python | twilio/rest/api/v2010/account/sip/domain/ip_access_control_list_mapping.py | Python | mit | 16,185 | 0.00451 |
#==============================================================================
#description : Solves the travelling salesman problem using simulated annealing.
#author : Yakup Cengiz
#date : 20151121
#version : 0.1
#notes :
#python_version : 3.5.0
#Reference : http://www.psychicorigami.com/category/tsp/
#==============================================================================
import math
import sys
import os
import random
CommonPath = os.path.abspath(os.path.join('..', 'Common'))
sys.path.append(CommonPath)
import tsp
def GenerateInitialPath(tour_length):
tour=list(range(tour_length))
random.shuffle(tour)
return tour
MAX_ITERATION = 50000
def reversed_sections(tour):
    '''Generator yielding every variation of the tour in which the section between two cities is reversed.'''
for i,j in tsp.AllEdges(len(tour)):
if i != j:
copy=tour[:]
if i < j:
copy[i:j+1]=reversed(tour[i:j+1])
else:
copy[i+1:]=reversed(tour[:j])
copy[:j]=reversed(tour[i+1:])
if copy != tour: # no point returning the same tour
yield copy
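# Illustrative example (not in the original source): for tour [0, 1, 2, 3],
# the (i=1, j=3) move reverses the slice in place and yields [0, 3, 2, 1];
# each distinct segment reversal that changes the tour is generated once.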
def kirkpatrick_cooling(start_temp, alpha):
T = start_temp
while True:
yield T
T = alpha * T
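# Illustrative: the schedule is geometric, decaying toward (but never
# reaching) zero:
#
#   schedule = kirkpatrick_cooling(100, 0.995)
#   [next(schedule) for _ in range(3)]  # -> [100, 99.5, 99.0025]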
def P(prev_score,next_score,temperature):
if next_score > prev_score:
return 1.0
else:
return math.exp( -abs(next_score-prev_score)/temperature )
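# Illustrative: better solutions are always accepted, worse ones with a
# probability that shrinks as the temperature drops (scores here are
# negated tour lengths, so larger is better):
#
#   P(prev_score=-120, next_score=-115, temperature=10)  # -> 1.0 (improvement)
#   P(prev_score=-120, next_score=-125, temperature=10)  # -> exp(-0.5) ~= 0.607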
class ObjectiveFunction:
'''class to wrap an objective function and
keep track of the best solution evaluated'''
def __init__(self,objective_function):
self.objective_function=objective_function
self.best=None
self.best_score=None
def __call__(self,solution):
score=self.objective_function(solution)
if self.best is None or score > self.best_score:
self.best_score=score
self.best=solution
return score
def ApplySimulatedAnnealing(init_function,move_operator,objective_function,max_evaluations,start_temp,alpha):
# wrap the objective function (so we record the best)
objective_function=ObjectiveFunction(objective_function)
current = init_function()
current_score = objective_function(current)
iterationCount = 1
cooling_schedule = kirkpatrick_cooling(start_temp, alpha)
for temperature in cooling_schedule:
done = False
# examine moves around our current position
for next in move_operator(current):
if iterationCount >= max_evaluations:
done=True
break
next_score=objective_function(next)
iterationCount+=1
            # probabilistically accept this solution, always accepting better solutions
p = P(current_score, next_score, temperature)
            # random.random() returns a float drawn uniformly from [0.0, 1.0),
            # while P returns an acceptance probability in [0.0, 1.0]
if random.random() < p:
current = next
current_score= next_score
break
# see if completely finished
if done: break
best_score = objective_function.best_score
best = objective_function.best
return (iterationCount,best_score,best)
def SolveTSP():
print("Starting to solve travel salesman problem")
    coordinates = tsp.ReadCoordinatesFromFile("./cityCoordinates.csv")
    distance_matrix = tsp.ComputeDistanceMatrix(coordinates)
init_function = lambda: GenerateInitialPath(len(coordinates))
objective_function = lambda tour: -tsp.ComputeTourLength(distance_matrix, tour)
start_temp,alpha = 100, 0.995
iterationCount,best_score,shortestPath = ApplySimulatedAnnealing(init_function, reversed_sections, objective_function, MAX_ITERATION,start_temp,alpha)
    print(iterationCount, best_score, shortestPath)
    tsp.DrawPath(coordinates, shortestPath, "TSP.png")
if __name__ == "__main__":
    SolveTSP()
| yakupc/Artificial-Intelligence | Algorithms/SolveTSPSimulatedAnnealing/SolveTSPSimulatedAnnealing.py | Python | mit | 4,196 | 0.023832 |
import math
import sys
from functools import lru_cache
from typing import Optional, Union # noqa
import urwid
from mitmproxy import contentviews
from mitmproxy import http
from mitmproxy.tools.console import common
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import flowdetailview
from mitmproxy.tools.console import searchable
from mitmproxy.tools.console import tabs
import mitmproxy.tools.console.master # noqa
from mitmproxy.utils import strutils
class SearchError(Exception):
pass
class FlowViewHeader(urwid.WidgetWrap):
def __init__(
self,
master: "mitmproxy.tools.console.master.ConsoleMaster",
) -> None:
self.master = master
self.focus_changed()
def focus_changed(self):
cols, _ = self.master.ui.get_cols_rows()
if self.master.view.focus.flow:
self._w = common.format_flow(
self.master.view.focus.flow,
False,
extended=True,
hostheader=self.master.options.showhost,
max_url_len=cols,
)
else:
self._w = urwid.Pile([])
class FlowDetails(tabs.Tabs):
def __init__(self, master):
self.master = master
super().__init__([])
self.show()
self.last_displayed_body = None
def focus_changed(self):
if self.master.view.focus.flow:
self.tabs = [
(self.tab_request, self.view_request),
(self.tab_response, self.view_response),
(self.tab_details, self.view_details),
]
self.show()
else:
self.master.window.pop()
@property
def view(self):
return self.master.view
@property
def flow(self):
return self.master.view.focus.flow
def tab_request(self):
if self.flow.intercepted and not self.flow.response:
return "Request intercepted"
else:
return "Request"
def tab_response(self):
if self.flow.intercepted and self.flow.response:
return "Response intercepted"
else:
return "Response"
def tab_details(self):
return "Detail"
def view_request(self):
return self.conn_text(self.flow.request)
def view_response(self):
return self.conn_text(self.flow.response)
def view_details(self):
return flowdetailview.flowdetails(self.view, self.flow)
def content_view(self, viewmode, message):
if message.raw_content is None:
msg, body = "", [urwid.Text([("error", "[content missing]")])]
return msg, body
else:
full = self.master.commands.execute("view.getval @focus fullcontents false")
if full == "true":
limit = sys.maxsize
else:
limit = contentviews.VIEW_CUTOFF
flow_modify_cache_invalidation = hash((
message.raw_content,
message.headers.fields,
getattr(message, "path", None),
))
            # we need to pass the message out-of-band because it's not hashable
self._get_content_view_message = message
return self._get_content_view(viewmode, limit, flow_modify_cache_invalidation)
@lru_cache(maxsize=200)
def _get_content_view(self, viewmode, max_lines, _):
message = self._get_content_view_message
self._get_content_view_message = None
description, lines, error = contentviews.get_message_content_view(
viewmode, message
)
if error:
self.master.log.debug(error)
# Give hint that you have to tab for the response.
if description == "No content" and isinstance(message, http.HTTPRequest):
description = "No request content (press tab to view response)"
        # If the user has a wide terminal, they get fewer lines; this should not be an issue.
chars_per_line = 80
max_chars = max_lines * chars_per_line
total_chars = 0
text_objects = []
for line in lines:
txt = []
for (style, text) in line:
if total_chars + len(text) > max_chars:
text = text[:max_chars - total_chars]
txt.append((style, text))
total_chars += len(text)
if total_chars == max_chars:
break
# round up to the next line.
total_chars = int(math.ceil(total_chars / chars_per_line) * chars_per_line)
text_objects.append(urwid.Text(txt))
if total_chars == max_chars:
text_objects.append(urwid.Text([
("highlight", "Stopped displaying data after %d lines. Press " % max_lines),
("key", "f"),
("highlight", " to load all data.")
]))
break
return description, text_objects
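    # Illustrative note (not part of mitmproxy): lru_cache can only key on
    # hashable arguments, so content_view() above smuggles the unhashable
    # message in via self._get_content_view_message and passes a hash of its
    # mutable parts as the third argument purely to invalidate the cache:
    #
    #   self._get_content_view_message = message
    #   self._get_content_view(viewmode, limit,
    #                          hash((message.raw_content, message.headers.fields)))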
def conn_text(self, conn):
if conn:
hdrs = []
for k, v in conn.headers.fields:
# This will always force an ascii representation of headers. For example, if the server sends a
#
# X-Authors: Made with ❤ in Hamburg
#
# header, mitmproxy will display the following:
#
# X-Authors: Made with \xe2\x9d\xa4 in Hamburg.
#
# The alternative would be to just use the header's UTF-8 representation and maybe
# do `str.replace("\t", "\\t")` to exempt tabs from urwid's special characters escaping [1].
# That would in some terminals allow rendering UTF-8 characters, but the mapping
# wouldn't be bijective, i.e. a user couldn't distinguish "\\t" and "\t".
# Also, from a security perspective, a mitmproxy user couldn't be fooled by homoglyphs.
#
# 1) https://github.com/mitmproxy/mitmproxy/issues/1833
# https://github.com/urwid/urwid/blob/6608ee2c9932d264abd1171468d833b7a4082e13/urwid/display_common.py#L35-L36,
k = strutils.bytes_to_escaped_str(k) + ":"
v = strutils.bytes_to_escaped_str(v)
hdrs.append((k, v))
txt = common.format_keyvals(
hdrs,
key_format="header"
)
viewmode = self.master.commands.call("console.flowview.mode")
msg, body = self.content_view(viewmode, conn)
cols = [
urwid.Text(
[
("heading", msg),
]
),
urwid.Text(
[
" ",
('heading', "["),
('heading_key', "m"),
('heading', (":%s]" % viewmode)),
],
align="right"
)
]
title = urwid.AttrWrap(urwid.Columns(cols), "heading")
txt.append(title)
txt.extend(body)
else:
txt = [
urwid.Text(""),
urwid.Text(
[
("highlight", "No response. Press "),
("key", "e"),
("highlight", " and edit any aspect to add one."),
]
)
]
return searchable.Searchable(txt)
class FlowView(urwid.Frame, layoutwidget.LayoutWidget):
keyctx = "flowview"
title = "Flow Details"
def __init__(self, master):
super().__init__(
FlowDetails(master),
header = FlowViewHeader(master),
)
self.master = master
def focus_changed(self, *args, **kwargs):
self.body.focus_changed()
self.header.focus_changed()
| cortesi/mitmproxy | mitmproxy/tools/console/flowview.py | Python | mit | 8,005 | 0.001749 |
#!/usr/bin/env python
"""
cpuinfo
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Pearu Peterson
"""
from __future__ import division, absolute_import, print_function
__all__ = ['cpu']
import sys, re, types
import os
if sys.version_info[0] >= 3:
from subprocess import getstatusoutput
else:
from commands import getstatusoutput
import warnings
import platform
from numpy.distutils.compat import get_exception
def getoutput(cmd, successful_status=(0,), stacklevel=1):
try:
status, output = getstatusoutput(cmd)
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
return False, ""
if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
return True, output
return False, output
def command_info(successful_status=(0,), stacklevel=1, **kw):
info = {}
for key in kw:
ok, output = getoutput(kw[key], successful_status=successful_status,
stacklevel=stacklevel+1)
if ok:
info[key] = output.strip()
return info
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
ok, output = getoutput(cmd, successful_status=successful_status,
stacklevel=stacklevel+1)
if not ok:
return
for line in output.splitlines():
yield line.strip()
def key_value_from_command(cmd, sep, successful_status=(0,),
stacklevel=1):
d = {}
for line in command_by_line(cmd, successful_status=successful_status,
stacklevel=stacklevel+1):
l = [s.strip() for s in line.split(sep, 1)]
if len(l) == 2:
d[l[0]] = l[1]
return d
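# Illustrative example (assumed output, not from the original source): a
# `sysctl hw` line such as "hw.ncpu = 8" is split once on '=' and stripped,
# producing {'hw.ncpu': '8'}.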
class CPUInfoBase(object):
"""Holds CPU information and provides methods for requiring
the availability of various CPU features.
"""
def _try_call(self, func):
try:
return func()
except Exception:
pass
def __getattr__(self, name):
if not name.startswith('_'):
if hasattr(self, '_'+name):
attr = getattr(self, '_'+name)
if isinstance(attr, types.MethodType):
return lambda func=self._try_call,attr=attr : func(attr)
else:
return lambda : None
raise AttributeError(name)
def _getNCPUs(self):
return 1
def __get_nbits(self):
abits = platform.architecture()[0]
nbits = re.compile(r'(\d+)bit').search(abits).group(1)
return nbits
def _is_32bit(self):
return self.__get_nbits() == '32'
def _is_64bit(self):
return self.__get_nbits() == '64'
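# Illustrative sketch (not in the original source): __getattr__ maps any
# public is_*/has_* lookup onto the matching private predicate, wrapped by
# _try_call so that a missing /proc/cpuinfo key yields None rather than a
# KeyError; absent predicates fall back to a no-op lambda returning None:
#
#   cpu = cpuinfo()
#   if cpu.is_Intel() and cpu.has_sse2():   # resolves _is_Intel/_has_sse2
#       pass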
class LinuxCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = [ {} ]
ok, output = getoutput('uname -m')
if ok:
info[0]['uname_m'] = output.strip()
try:
fo = open('/proc/cpuinfo')
except EnvironmentError:
e = get_exception()
warnings.warn(str(e), UserWarning, stacklevel=2)
else:
for line in fo:
name_value = [s.strip() for s in line.split(':', 1)]
if len(name_value) != 2:
continue
name, value = name_value
if not info or name in info[-1]: # next processor
info.append({})
info[-1][name] = value
fo.close()
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['vendor_id']=='AuthenticAMD'
def _is_AthlonK6_2(self):
return self._is_AMD() and self.info[0]['model'] == '2'
def _is_AthlonK6_3(self):
return self._is_AMD() and self.info[0]['model'] == '3'
def _is_AthlonK6(self):
return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
def _is_AthlonK7(self):
return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
def _is_AthlonMP(self):
return re.match(r'.*?Athlon\(tm\) MP\b',
self.info[0]['model name']) is not None
def _is_AMD64(self):
        return self.is_AMD() and self.info[0]['cpu family'] == '15'
def _is_Athlon64(self):
return re.match(r'.*?Athlon\(tm\) 64\b',
self.info[0]['model name']) is not None
def _is_AthlonHX(self):
return re.match(r'.*?Athlon HX\b',
self.info[0]['model name']) is not None
def _is_Opteron(self):
return re.match(r'.*?Opteron\b',
self.info[0]['model name']) is not None
def _is_Hammer(self):
return re.match(r'.*?Hammer\b',
self.info[0]['model name']) is not None
# Alpha
def _is_Alpha(self):
return self.info[0]['cpu']=='Alpha'
def _is_EV4(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
def _is_EV5(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
def _is_EV56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
def _is_PCA56(self):
return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
# Intel
#XXX
_is_i386 = _not_impl
def _is_Intel(self):
return self.info[0]['vendor_id']=='GenuineIntel'
def _is_i486(self):
return self.info[0]['cpu']=='i486'
def _is_i586(self):
return self.is_Intel() and self.info[0]['cpu family'] == '5'
def _is_i686(self):
return self.is_Intel() and self.info[0]['cpu family'] == '6'
def _is_Celeron(self):
return re.match(r'.*?Celeron',
self.info[0]['model name']) is not None
def _is_Pentium(self):
return re.match(r'.*?Pentium',
self.info[0]['model name']) is not None
def _is_PentiumII(self):
return re.match(r'.*?Pentium.*?II\b',
self.info[0]['model name']) is not None
def _is_PentiumPro(self):
return re.match(r'.*?PentiumPro\b',
self.info[0]['model name']) is not None
def _is_PentiumMMX(self):
return re.match(r'.*?Pentium.*?MMX\b',
self.info[0]['model name']) is not None
def _is_PentiumIII(self):
return re.match(r'.*?Pentium.*?III\b',
self.info[0]['model name']) is not None
def _is_PentiumIV(self):
return re.match(r'.*?Pentium.*?(IV|4)\b',
self.info[0]['model name']) is not None
def _is_PentiumM(self):
return re.match(r'.*?Pentium.*?M\b',
self.info[0]['model name']) is not None
def _is_Prescott(self):
return self.is_PentiumIV() and self.has_sse3()
def _is_Nocona(self):
return (self.is_Intel()
and (self.info[0]['cpu family'] == '6'
or self.info[0]['cpu family'] == '15')
and (self.has_sse3() and not self.has_ssse3())
and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None)
def _is_Core2(self):
return (self.is_64bit() and self.is_Intel() and
re.match(r'.*?Core\(TM\)2\b',
self.info[0]['model name']) is not None)
def _is_Itanium(self):
return re.match(r'.*?Itanium\b',
self.info[0]['family']) is not None
def _is_XEON(self):
return re.match(r'.*?XEON\b',
self.info[0]['model name'], re.IGNORECASE) is not None
_is_Xeon = _is_XEON
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_fdiv_bug(self):
return self.info[0]['fdiv_bug']=='yes'
def _has_f00f_bug(self):
return self.info[0]['f00f_bug']=='yes'
def _has_mmx(self):
return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None
def _has_sse(self):
return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None
def _has_sse2(self):
return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None
def _has_sse3(self):
return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None
def _has_ssse3(self):
return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None
def _has_3dnow(self):
return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None
def _has_3dnowext(self):
return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
class IRIXCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = key_value_from_command('sysconf', sep=' ',
successful_status=(0, 1))
self.__class__.info = info
def _not_impl(self): pass
def _is_singleCPU(self):
return self.info.get('NUM_PROCESSORS') == '1'
def _getNCPUs(self):
return int(self.info.get('NUM_PROCESSORS', 1))
def __cputype(self, n):
return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
def _is_r2000(self): return self.__cputype(2000)
def _is_r3000(self): return self.__cputype(3000)
def _is_r3900(self): return self.__cputype(3900)
def _is_r4000(self): return self.__cputype(4000)
def _is_r4100(self): return self.__cputype(4100)
def _is_r4300(self): return self.__cputype(4300)
def _is_r4400(self): return self.__cputype(4400)
def _is_r4600(self): return self.__cputype(4600)
def _is_r4650(self): return self.__cputype(4650)
def _is_r5000(self): return self.__cputype(5000)
def _is_r6000(self): return self.__cputype(6000)
def _is_r8000(self): return self.__cputype(8000)
def _is_r10000(self): return self.__cputype(10000)
def _is_r12000(self): return self.__cputype(12000)
def _is_rorion(self): return self.__cputype('orion')
def get_ip(self):
try: return self.info.get('MACHINE')
except Exception: pass
def __machine(self, n):
return self.info.get('MACHINE').lower() == 'ip%s' % (n)
def _is_IP19(self): return self.__machine(19)
def _is_IP20(self): return self.__machine(20)
def _is_IP21(self): return self.__machine(21)
def _is_IP22(self): return self.__machine(22)
def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
def _is_IP22_5k(self): return self.__machine(22) and self._is_r5000()
def _is_IP24(self): return self.__machine(24)
def _is_IP25(self): return self.__machine(25)
def _is_IP26(self): return self.__machine(26)
def _is_IP27(self): return self.__machine(27)
def _is_IP28(self): return self.__machine(28)
def _is_IP30(self): return self.__machine(30)
def _is_IP32(self): return self.__machine(32)
def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
class DarwinCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
machine='machine')
info['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
self.__class__.info = info
def _not_impl(self): pass
def _getNCPUs(self):
return int(self.info['sysctl_hw'].get('hw.ncpu', 1))
def _is_Power_Macintosh(self):
return self.info['sysctl_hw']['hw.machine']=='Power Macintosh'
def _is_i386(self):
return self.info['arch']=='i386'
def _is_ppc(self):
return self.info['arch']=='ppc'
def __machine(self, n):
return self.info['machine'] == 'ppc%s'%n
def _is_ppc601(self): return self.__machine(601)
def _is_ppc602(self): return self.__machine(602)
def _is_ppc603(self): return self.__machine(603)
def _is_ppc603e(self): return self.__machine('603e')
def _is_ppc604(self): return self.__machine(604)
def _is_ppc604e(self): return self.__machine('604e')
def _is_ppc620(self): return self.__machine(620)
def _is_ppc630(self): return self.__machine(630)
def _is_ppc740(self): return self.__machine(740)
def _is_ppc7400(self): return self.__machine(7400)
def _is_ppc7450(self): return self.__machine(7450)
def _is_ppc750(self): return self.__machine(750)
def _is_ppc403(self): return self.__machine(403)
def _is_ppc505(self): return self.__machine(505)
def _is_ppc801(self): return self.__machine(801)
def _is_ppc821(self): return self.__machine(821)
def _is_ppc823(self): return self.__machine(823)
def _is_ppc860(self): return self.__machine(860)
class SunOSCPUInfo(CPUInfoBase):
info = None
def __init__(self):
if self.info is not None:
return
info = command_info(arch='arch',
mach='mach',
uname_i='uname_i',
isainfo_b='isainfo -b',
isainfo_n='isainfo -n',
)
info['uname_X'] = key_value_from_command('uname -X', sep='=')
for line in command_by_line('psrinfo -v 0'):
m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
if m:
info['processor'] = m.group('p')
break
self.__class__.info = info
def _not_impl(self): pass
def _is_i386(self):
return self.info['isainfo_n']=='i386'
def _is_sparc(self):
return self.info['isainfo_n']=='sparc'
def _is_sparcv9(self):
return self.info['isainfo_n']=='sparcv9'
def _getNCPUs(self):
return int(self.info['uname_X'].get('NumCPU', 1))
def _is_sun4(self):
return self.info['arch']=='sun4'
def _is_SUNW(self):
return re.match(r'SUNW', self.info['uname_i']) is not None
def _is_sparcstation5(self):
return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
def _is_ultra1(self):
return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
def _is_ultra250(self):
return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
def _is_ultra2(self):
return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
def _is_ultra30(self):
return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
def _is_ultra4(self):
return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
def _is_ultra5_10(self):
return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
def _is_ultra5(self):
return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
def _is_ultra60(self):
return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
def _is_ultra80(self):
return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
def _is_ultraenterprice(self):
return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
def _is_ultraenterprice10k(self):
return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
def _is_sunfire(self):
return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
def _is_ultra(self):
return re.match(r'.*Ultra', self.info['uname_i']) is not None
def _is_cpusparcv7(self):
return self.info['processor']=='sparcv7'
def _is_cpusparcv8(self):
return self.info['processor']=='sparcv8'
def _is_cpusparcv9(self):
return self.info['processor']=='sparcv9'
class Win32CPUInfo(CPUInfoBase):
info = None
pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
# XXX: what does the value of
# HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
# mean?
def __init__(self):
if self.info is not None:
return
info = []
try:
#XXX: Bad style to use so long `try:...except:...`. Fix it!
if sys.version_info[0] >= 3:
import winreg
else:
import _winreg as winreg
prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
r"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
chnd=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey)
pnum=0
while True:
try:
proc=winreg.EnumKey(chnd, pnum)
except winreg.error:
break
else:
pnum+=1
info.append({"Processor":proc})
phnd=winreg.OpenKey(chnd, proc)
pidx=0
while True:
try:
name, value, vtpe=winreg.EnumValue(phnd, pidx)
except winreg.error:
break
else:
pidx=pidx+1
info[-1][name]=value
if name=="Identifier":
srch=prgx.search(value)
if srch:
info[-1]["Family"]=int(srch.group("FML"))
info[-1]["Model"]=int(srch.group("MDL"))
info[-1]["Stepping"]=int(srch.group("STP"))
except Exception:
print(sys.exc_info()[1], '(ignoring)')
self.__class__.info = info
def _not_impl(self): pass
# Athlon
def _is_AMD(self):
return self.info[0]['VendorIdentifier']=='AuthenticAMD'
def _is_Am486(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_Am5x86(self):
return self.is_AMD() and self.info[0]['Family']==4
def _is_AMDK5(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [0, 1, 2, 3]
def _is_AMDK6(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model'] in [6, 7]
def _is_AMDK6_2(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==8
def _is_AMDK6_3(self):
return self.is_AMD() and self.info[0]['Family']==5 \
and self.info[0]['Model']==9
def _is_AMDK7(self):
return self.is_AMD() and self.info[0]['Family'] == 6
# To reliably distinguish between the different types of AMD64 chips
# (Athlon64, Operton, Athlon64 X2, Semperon, Turion 64, etc.) would
# require looking at the 'brand' from cpuid
def _is_AMD64(self):
return self.is_AMD() and self.info[0]['Family'] == 15
# Intel
def _is_Intel(self):
return self.info[0]['VendorIdentifier']=='GenuineIntel'
def _is_i386(self):
return self.info[0]['Family']==3
def _is_i486(self):
return self.info[0]['Family']==4
def _is_i586(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_i686(self):
return self.is_Intel() and self.info[0]['Family']==6
def _is_Pentium(self):
return self.is_Intel() and self.info[0]['Family']==5
def _is_PentiumMMX(self):
return self.is_Intel() and self.info[0]['Family']==5 \
and self.info[0]['Model']==4
def _is_PentiumPro(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model']==1
def _is_PentiumII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [3, 5, 6]
def _is_PentiumIII(self):
return self.is_Intel() and self.info[0]['Family']==6 \
and self.info[0]['Model'] in [7, 8, 9, 10, 11]
def _is_PentiumIV(self):
return self.is_Intel() and self.info[0]['Family']==15
def _is_PentiumM(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [9, 13, 14]
def _is_Core2(self):
return self.is_Intel() and self.info[0]['Family'] == 6 \
and self.info[0]['Model'] in [15, 16, 17]
# Varia
def _is_singleCPU(self):
return len(self.info) == 1
def _getNCPUs(self):
return len(self.info)
def _has_mmx(self):
if self.is_Intel():
return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
or (self.info[0]['Family'] in [6, 15])
elif self.is_AMD():
return self.info[0]['Family'] in [5, 6, 15]
else:
return False
def _has_sse(self):
if self.is_Intel():
return ((self.info[0]['Family']==6 and
self.info[0]['Model'] in [7, 8, 9, 10, 11])
or self.info[0]['Family']==15)
elif self.is_AMD():
return ((self.info[0]['Family']==6 and
self.info[0]['Model'] in [6, 7, 8, 10])
or self.info[0]['Family']==15)
else:
return False
def _has_sse2(self):
if self.is_Intel():
            return self.is_PentiumIV() or self.is_PentiumM() \
or self.is_Core2()
elif self.is_AMD():
return self.is_AMD64()
else:
return False
def _has_3dnow(self):
return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]
def _has_3dnowext(self):
return self.is_AMD() and self.info[0]['Family'] in [6, 15]
if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
cpuinfo = LinuxCPUInfo
elif sys.platform.startswith('irix'):
cpuinfo = IRIXCPUInfo
elif sys.platform == 'darwin':
cpuinfo = DarwinCPUInfo
elif sys.platform.startswith('sunos'):
cpuinfo = SunOSCPUInfo
elif sys.platform.startswith('win32'):
cpuinfo = Win32CPUInfo
elif sys.platform.startswith('cygwin'):
cpuinfo = LinuxCPUInfo
#XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
else:
cpuinfo = CPUInfoBase
cpu = cpuinfo()
#if __name__ == "__main__":
#
# cpu.is_blaa()
# cpu.is_Intel()
# cpu.is_Alpha()
#
# print('CPU information:'),
# for name in dir(cpuinfo):
# if name[0]=='_' and name[1]!='_':
# r = getattr(cpu,name[1:])()
# if r:
# if r!=1:
# print('%s=%s' %(name[1:],r))
# else:
# print(name[1:]),
# print()
| MSeifert04/numpy | numpy/distutils/cpuinfo.py | Python | bsd-3-clause | 23,013 | 0.00491 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Above the run-comment and file encoding comment.
# TODO FIXME XXX
# Keywords
with break continue del exec return pass print raise global assert lambda yield
for while if elif else import from as try except finally and in is not or
yield from
def functionname
class Classname
def 哈哈
class 哈哈
await
async def Test
async with
async for
# Builtin objects.
True False Ellipsis None NotImplemented
# Builtin function and types.
__import__ abs all any apply basestring bool buffer callable chr classmethod
cmp coerce compile complex delattr dict dir divmod enumerate eval execfile file
filter float frozenset getattr globals hasattr hash help hex id input int
intern isinstance issubclass iter len list locals long map max min object oct
open ord pow property range raw_input reduce reload repr reversed round set
setattr slice sorted staticmethod str sum super tuple type unichr unicode vars
xrange zip
# Builtin exceptions and warnings.
BaseException Exception StandardError ArithmeticError LookupError
EnvironmentError
AssertionError AttributeError EOFError FloatingPointError GeneratorExit IOError
ImportError IndexError KeyError KeyboardInterrupt MemoryError NameError
NotImplementedError OSError OverflowError ReferenceError RuntimeError
StopIteration SyntaxError IndentationError TabError SystemError SystemExit
TypeError UnboundLocalError UnicodeError UnicodeEncodeError UnicodeDecodeError
UnicodeTranslateError ValueError WindowsError ZeroDivisionError
Warning UserWarning DeprecationWarning PendingDepricationWarning SyntaxWarning
RuntimeWarning FutureWarning ImportWarning UnicodeWarning
# Decorators.
@ decoratorname
@ object.__init__(arg1, arg2)
# Numbers
0 1 2 9 10 0x1f .3 12.34 0j 0j 34.2E-3 0b10 0o77 1023434 0x0
# Erroneous numbers
077 100L 0xfffffffL 0L 08 0xk 0x 0b102 0o78 0o123LaB
# Strings
" test " ' test '
"""
test
"""
'''
test
'''
" \a\b\c\"\'\n\r \x34\077 \08 \xag"
r" \" \' "
"testтест"
b"test"
b"test\r\n\xffff"
b"тестtest"
br"test"
br"\a\b\n\r"
# Formattings
" %f "
b" %f "
"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
b"{0.name!r:b} {0[n]} {name!s: } {{test}} {{}} {} {.__len__:s}"
"${test} ${test ${test}aname $$$ $test+nope"
b"${test} ${test ${test}aname $$$ $test+nope"
# Doctests.
"""
Test:
>>> a = 5
>>> a
5
Test
"""
'''
Test:
>>> a = 5
>>> a
5
Test
'''
# Erroneous symbols or bad variable names.
$ ? 6xav
&& || ===
# Indentation errors.
break
# Trailing space errors.
break
"""
test
"""
| mumuxme/vim-config | test/test.py | Python | gpl-3.0 | 2,612 | 0.005393 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-11-22 22:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('socialnet', '0029_auto_20161121_0543'),
]
operations = [
migrations.AddField(
model_name='author',
name='displayname',
field=models.CharField(blank=True, max_length=255, null=True),
),
]
    ]
| CMPUT404F16T06/CMPUT404Project | mysite/socialnet/migrations/0030_author_displayname.py | Python | apache-2.0 | 480 | 0 |
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.exceptions import ImproperlyConfigured
from django.core.mail import EmailMessage, EmailMultiAlternatives
import base64
from postmark.core import PMMail, PMBatchMail
class PMEmailMessage(EmailMessage):
def __init__(self, *args, **kwargs):
if 'tag' in kwargs:
self.tag = kwargs['tag']
del kwargs['tag']
else:
self.tag = None
if 'track_opens' in kwargs:
self.track_opens = kwargs['track_opens']
del kwargs['track_opens']
else:
self.track_opens = getattr(settings, 'POSTMARK_TRACK_OPENS', False)
if 'message_stream' in kwargs:
self.message_stream = kwargs['message_stream']
del kwargs['message_stream']
else:
self.message_stream = None
super(PMEmailMessage, self).__init__(*args, **kwargs)
class PMEmailMultiAlternatives(EmailMultiAlternatives):
def __init__(self, *args, **kwargs):
if 'tag' in kwargs:
self.tag = kwargs['tag']
del kwargs['tag']
else:
self.tag = None
if 'track_opens' in kwargs:
self.track_opens = kwargs['track_opens']
del kwargs['track_opens']
else:
self.track_opens = getattr(settings, 'POSTMARK_TRACK_OPENS', False)
if 'message_stream' in kwargs:
self.message_stream = kwargs['message_stream']
del kwargs['message_stream']
else:
self.message_stream = None
super(PMEmailMultiAlternatives, self).__init__(*args, **kwargs)
class EmailBackend(BaseEmailBackend):
def __init__(self, api_key=None, default_sender=None, **kwargs):
"""
Initialize the backend.
"""
super(EmailBackend, self).__init__(**kwargs)
self.api_key = api_key if api_key is not None else getattr(settings, 'POSTMARK_API_KEY', None)
if self.api_key is None:
raise ImproperlyConfigured('POSTMARK API key must be set in Django settings file or passed to backend constructor.')
self.default_sender = getattr(settings, 'POSTMARK_SENDER', default_sender)
self.test_mode = getattr(settings, 'POSTMARK_TEST_MODE', False)
self.return_message_id = getattr(settings, 'POSTMARK_RETURN_MESSAGE_ID', False)
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not email_messages:
return
sent, instance = self._send(email_messages)
if sent and self.return_message_id:
return [m.message_id for m in instance.messages]
elif sent:
return len(email_messages)
return 0
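    # Usage sketch (illustrative, not part of the package): with Django's
    # EMAIL_BACKEND pointing at this class and POSTMARK_API_KEY set, a tagged
    # message is routed through send_messages():
    #
    #   msg = PMEmailMessage(subject="Hi", body="Hello", tag="onboarding",
    #                        from_email="sender@example.com",
    #                        to=["rcpt@example.com"])
    #   msg.send()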
def _build_message(self, message):
"""A helper method to convert a PMEmailMessage to a PMMail"""
if not message.recipients():
return False
recipients = ','.join(message.to)
recipients_cc = ','.join(message.cc)
recipients_bcc = ','.join(message.bcc)
text_body = message.body
html_body = None
if isinstance(message, EmailMultiAlternatives):
for alt in message.alternatives:
if alt[1] == "text/html":
html_body = alt[0]
break
if getattr(message, 'content_subtype', None) == 'html':
# Don't send html content as plain text
text_body = None
html_body = message.body
reply_to = ','.join(message.reply_to)
custom_headers = {}
if message.extra_headers and isinstance(message.extra_headers, dict):
if 'Reply-To' in message.extra_headers:
reply_to = message.extra_headers.pop('Reply-To')
if len(message.extra_headers):
custom_headers = message.extra_headers
attachments = []
if message.attachments and isinstance(message.attachments, list):
if len(message.attachments):
for item in message.attachments:
if isinstance(item, tuple):
(f, content, m) = item
if isinstance(content, str):
content = content.encode()
content = base64.b64encode(content)
                        # b64encode returns bytes on Python 3. PMMail needs a
                        # str (for JSON serialization). Convert on Python 3
                        # only to avoid a useless performance hit on Python 2.
if not isinstance(content, str):
content = content.decode()
attachments.append((f, content, m))
else:
attachments.append(item)
message_stream = getattr(message, 'message_stream', None)
postmark_message = PMMail(api_key=self.api_key,
subject=message.subject,
sender=message.from_email,
to=recipients,
cc=recipients_cc,
bcc=recipients_bcc,
text_body=text_body,
html_body=html_body,
reply_to=reply_to,
custom_headers=custom_headers,
attachments=attachments,
message_stream=message_stream)
postmark_message.tag = getattr(message, 'tag', None)
postmark_message.track_opens = getattr(message, 'track_opens', False)
return postmark_message
def _send(self, messages):
"""A helper method that does the actual sending."""
if len(messages) == 1:
to_send = self._build_message(messages[0])
if to_send is False:
# The message was missing recipients.
# Bail.
return False, None
else:
pm_messages = list(map(self._build_message, messages))
pm_messages = [m for m in pm_messages if m]
if len(pm_messages) == 0:
# If after filtering, there aren't any messages
# to send, bail.
return False, None
to_send = PMBatchMail(messages=pm_messages)
try:
to_send.send(test=self.test_mode)
except:
if self.fail_silently:
return False, to_send
raise
return True, to_send
| themartorana/python-postmark | postmark/django_backend.py | Python | mit | 6,739 | 0.00089 |
import os
import os.path as osp
import shutil
import subprocess
from functools import reduce  # builtin on Python 2; the import is required on Python 3
from .. api import Installer, parse_version
from .. toolbox import find_executable, pushd
class StowInstaller(Installer):
name = 'stow'
def __init__(self, provider, release, config):
super(StowInstaller, self).__init__(provider, release, config)
self.path = config['path']
self.pkg_path = osp.join(self.path, 'stow')
self.activate = config.get('activate', True)
self.executable = find_executable(
*config.get('stow', ('stow', 'xstow'))
)
def release_dir_name(self, version=''):
return '{}-{}'.format(self.release.pkg_name, version)
def get_local_versions(self):
versions = []
if not osp.isdir(self.pkg_path):
return versions
for p in os.listdir(self.pkg_path):
fp = osp.join(self.pkg_path, p)
if osp.isdir(fp) and p.startswith(self.release_dir_name()):
versions.append(p[len(self.release_dir_name()):])
return versions
def installed_version(self):
"""
:return: most recent version available in stow packages directory.
:rtype: string
"""
installed_version = reduce(
lambda a, b: a if a > b else b,
map(
lambda v: (parse_version(v), v),
self.get_local_versions()
),
(None, None)
)
return installed_version[1]
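    # Illustrative (not in the original source): the reduce() above keeps the
    # highest parsed version, so e.g. ['1.2.0', '1.10.0'] resolves to
    # '1.10.0', whereas a plain lexicographic max() would wrongly pick '1.2.0'.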
def _stow(self, *args):
with pushd(self.pkg_path):
subprocess.check_call([self.executable] + list(args))
def disable_package(self, version):
self._stow('-D', self.release_dir_name(version))
def enable_package(self, version):
self._stow(self.release_dir_name(version))
def install(self, fetched_items_path, version):
rdir_name = self.release_dir_name(version)
release_path = osp.join(self.pkg_path, rdir_name)
if not osp.isdir(self.pkg_path):
os.makedirs(self.pkg_path)
elif osp.isdir(release_path):
raise Exception(
"Cannot install {}/{} in {}: directory exists".format(
self.provider.name,
self.release.name,
release_path
)
)
shutil.copytree(fetched_items_path, release_path)
if self.activate:
versions_to_disable = set(self.get_local_versions())
versions_to_disable.remove(version)
for v in versions_to_disable:
self.disable_package(v)
self.enable_package(version)
| cogniteev/easy-upgrade | easy_upgrade/lib/stow.py | Python | apache-2.0 | 2,651 | 0 |
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
import os
from PyQt4 import QtGui, Qt, QtCore
from opus_gui.general_manager.views.ui_dependency_viewer import Ui_DependencyViewer
class DependencyViewer(QtGui.QDialog, Ui_DependencyViewer):
def __init__(self, parent_window):
flags = QtCore.Qt.WindowTitleHint | QtCore.Qt.WindowSystemMenuHint | QtCore.Qt.WindowMaximizeButtonHint
QtGui.QDialog.__init__(self, parent_window, flags)
self.setupUi(self)
self.setModal(True) #TODO: this shouldn't be necessary, but without it the window is unresponsive
def show_error_message(self):
self.lbl_error.setVisible(True)
self.scrollArea.setVisible(False)
def show_graph(self, file_path, name):
self.lbl_error.setVisible(False)
self.scrollArea.setVisible(True)
self.setWindowTitle("Dependency graph of %s" % name)
self.image_file = file_path
pix = QtGui.QPixmap.fromImage(QtGui.QImage(file_path))
self.label.setPixmap(pix)
self.scrollAreaWidgetContents.setMinimumSize(pix.width(), pix.height())
self.label.setMinimumSize(pix.width(), pix.height())
rect = Qt.QApplication.desktop().screenGeometry(self)
self.resize(min(rect.width(), pix.width() + 35), min(rect.height(), pix.height() + 80))
self.update()
def on_closeWindow_released(self):
self.close()
os.remove(self.image_file)
| christianurich/VIBe2UrbanSim | 3rdparty/opus/src/opus_gui/general_manager/controllers/dependency_viewer.py | Python | gpl-2.0 | 1,509 | 0.003976 |
"""
These are the functions used to process medline (pubmed) files at the backend. They are meant for use internal use by metaknowledge.
"""
from .recordMedline import MedlineRecord, medlineRecordParser
from .medlineHandlers import isMedlineFile, medlineParser
from .tagProcessing.tagNames import tagNameDict, authorBasedTags, tagNameConverterDict
from .tagProcessing.specialFunctions import medlineSpecialTagToFunc
from .tagProcessing.tagFunctions import *
| networks-lab/metaknowledge | metaknowledge/medline/__init__.py | Python | gpl-2.0 | 458 | 0.004367 |
# coding=utf-8
"""Module of Data Types (eg. Temperature, Area, etc.)
Possesses capabilities for unit conversions and range checks.
It also includes descriptions of the data types and the units.
Properties:
TYPES: A tuple indicating all currently supported data types.
BASETYPES: A tuple indicating all base types. Base types are the
data types on which unit systems are defined.
UNITS: A dictionary containing all currently supported units. The
keys of this dictionary are the base type names (eg. 'Temperature').
TYPESDICT: A dictionary containing pointers to the classes of each data type.
The keys of this dictionary are the data type names.
"""
from .base import _DataTypeEnumeration
_data_types = _DataTypeEnumeration(import_modules=True)
TYPES = _data_types.types
BASETYPES = _data_types.base_types
UNITS = _data_types.units
TYPESDICT = _data_types.types_dict
| ladybug-analysis-tools/ladybug-core | ladybug/datatype/__init__.py | Python | gpl-3.0 | 901 | 0.00111 |
from engine.services.lib.debug import check_if_debugging
check_if_debugging()
import inspect
import logging
import os
import sys
import traceback
from logging.handlers import RotatingFileHandler
from subprocess import check_call, check_output
from flask import Flask
## Moved populate & upgrade from webapp
from initdb.populate import Populate
from initdb.upgrade import Upgrade
from pid import PidFile, PidFileAlreadyLockedError, PidFileAlreadyRunningError
check_output(("/isard/generate_certs.sh"), text=True).strip()
try:
p = Populate()
except Exception as e:
print(traceback.format_exc())
print("Error populating...")
exit(1)
try:
u = Upgrade()
except Exception as e:
print(traceback.format_exc())
print("Error Upgrading...")
exit(1)
## End
from engine.services.lib.functions import check_tables_populated
check_tables_populated()
from engine.models.engine import Engine
from engine.services import db
def run(app):
    # local import: gevent is only needed if this (currently unused) helper is called
    from gevent.pywsgi import WSGIServer
    http_server = WSGIServer(("0.0.0.0", 5555), app)
    http_server.serve_forever()
# if app.debug:
# from werkzeug.debug import DebuggedApplication
# app.wsgi_app = DebuggedApplication( app.wsgi_app, True )
if __name__ == "__main__":
p = PidFile("engine")
try:
p.create()
except PidFileAlreadyLockedError:
import time
err_pid = PidFile(str(time.time()))
err_pid.create()
while True:
time.sleep(1)
app = Flask(__name__)
app.m = Engine(with_status_threads=False)
app.db = db
# remove default logging for get/post messages
werk = logging.getLogger("werkzeug")
werk.setLevel(logging.ERROR)
# add log handler
handler = RotatingFileHandler("api.log", maxBytes=10000, backupCount=1)
handler.setLevel(logging.INFO)
app.logger.addHandler(handler)
# register blueprints
from engine.api import api as api_blueprint
app.register_blueprint(api_blueprint, url_prefix="") # url_prefix /api?
# run(app)
if os.environ.get("LOG_LEVEL") == "DEBUG":
app.run(debug=True, host="0.0.0.0")
else:
app.run(host="0.0.0.0")
| isard-vdi/isard | engine/engine/start.py | Python | agpl-3.0 | 2,127 | 0.007522 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import settings
#----------------------------------------
# globals
#----------------------------------------
web = settings.web
stow = web.utils.storage
#----------------------------------------
# Initialize menus (permission control)
#----------------------------------------
def init_menus():
_ = web.config.get('_gettext')
menus = [
stow({
'id':'base',
'title':_("menu_system_base"),
'menus':[
#stow({
            # 'id':'menusystemstatus', # system status
# 'icon_class':'icon-speedometer',
# 'title':_("menu_system_status"),
# 'content':[
# stow({'name':_('system_status'),'url':"javascript:load_page('systemstatus')",'id':'systemstatus'}),
# stow({'name':_('service_control'),'url':"javascript:load_page('servicecontrol')",'id':'servicecontrol'}),
# ]
#}),
stow({
                'id':'menunetwork', # network settings
'icon_class':'icon-network',
'title':_("menu_network"),
'content':[
stow({'name':_('iface_manage'),'url':"javascript:load_page('iface')",'id':'iface'}),
stow({'name':_('dns_manage'),'url':"javascript:load_page('dns')",'id':'dns'}),
stow({'name':_('route_manage'),'url':"javascript:load_page('route')",'id':'route'}),
]
}),
stow({
                'id':'menustorage', # storage settings
'icon_class':'icon-storage',
'title':_("menu_storage"),
'content':[
stow({'name':_('mdraid_manage'),'url':"javascript:load_page('mdraid')",'id':'mdraid'}),
stow({'name':_('lvm_manage'),'url':"javascript:load_page('lvm')",'id':'lvm'})
]
})
]
}),
#stow({
# 'id':'nas',
# 'title':_("menu_nas"),
# 'menus':[
#
# ]
#}),
stow({
'id':'san',
'title':_("menu_san"),
'menus':[
stow({
                'id':'menusanbase', # SAN basic configuration
'icon_class':'icon-speedometer',
'title':_("menu_san_base"),
'content':[
stow({'name':_('ipsan'),'url':"javascript:load_page('ipsan')",'id':'ipsan'}),
stow({'name':_('fcsan'),'url':"javascript:load_page('fcsan')",'id':'fcsan'}),
]
}),
#stow({
            # 'id':'menusanoptional', # SAN advanced configuration
# 'icon_class':'icon-speedometer',
# 'title':_("menu_san_optional"),
# 'content':[
# stow({'name':_('ipsan'),'url':"javascript:load_page('ipsan')",'id':'ipsan'}),
# stow({'name':_('fcsan'),'url':"javascript:load_page('fcsan')",'id':'fcsan'}),
# ]
#})
]
})
]
return menus
| liangtianyou/ST | stclient/menus.py | Python | gpl-3.0 | 3,290 | 0.024443 |
from .cluster_pb2 import FLNodeDef, FLClusterDef
from .cluster_spec import FLClusterSpec
| bytedance/fedlearner | fedlearner/fedavg/cluster/__init__.py | Python | apache-2.0 | 90 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
HTTP streaming toolbox with flow control, written in Python.
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
__title__ = 'tidehunter'
__version__ = '1.0.1'
VERSION = tuple(map(int, __version__.split('.')))
__author__ = 'Runzhou Li (Leo)'
__license__ = 'The MIT License (MIT)'
__copyright__ = 'Runzhou Li (Leo)'
from tidehunter.stream import (
Hunter, SimpleStateCounter
)
__all__ = [
'Hunter', 'SimpleStateCounter'
]
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError: # pragma: no cover
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| woozyking/tidehunter | tidehunter/__init__.py | Python | mit | 858 | 0.002331 |
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import Customer, Session, CompanyProfile
class CustomerUpdateForm(forms.ModelForm):
"""
Update Customer form
Field enhancements:
* therapyCategory uses forms.TypedChoiceField.
"""
class Meta:
model = Customer
fields = ['status', 'firstName', 'lastName', 'additionalName', 'ssn', 'address', 'zipCode', 'city', 'telephone', 'email',
'therapyCategory', 'sessionprice', 'sessionpriceKelaRefund'
# , 'statementpriceKela'
]
labels = {
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'ssn': _("Sosiaaliturvatunnus"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'status': _("Aktiivinen asiakas?"),
'therapyCategory': _("Terapialuokitus"),
'sessionprice': _("Tapaamisen perushinta"),
'sessionpriceKelaRefund': _("Kelan korvaus"),
# 'statementpriceKela': _("Kelakorvaus lausunnosta"),
}
therapyCategory = forms.TypedChoiceField(
label=Meta.labels['therapyCategory'],
choices=Customer.THERAPY_CATEGORY_CHOICES,
widget=forms.Select,
required=True
)
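# Usage sketch (illustrative, not part of the app): bound like any ModelForm,
# e.g. in a view:
#
#   form = CustomerUpdateForm(request.POST, instance=customer)
#   if form.is_valid():
#       form.save()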
class CustomerCreateForm(forms.ModelForm):
"""
Create Customer form.
Field enhancements:
* therapyCategory uses forms.TypedChoiceField.
"""
class Meta:
model = Customer
fields = ['firstName', 'lastName', 'additionalName', 'ssn', 'address', 'zipCode', 'city', 'telephone', 'email',
'status',
'therapyCategory', 'sessionprice', 'sessionpriceKelaRefund']
labels = {
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'ssn': _("Sosiaaliturvatunnus"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'status': _("Aktiivinen asiakas?"),
'therapyCategory': _("Terapialuokitus"),
'sessionprice': _("Tapaamisen perushinta"),
'sessionpriceKelaRefund': _("Kelan korvaus"),
}
therapyCategory = forms.TypedChoiceField(
label=Meta.labels['therapyCategory'],
choices=Customer.THERAPY_CATEGORY_CHOICES,
widget=forms.Select,
required=True
)
class SessionUpdateForm(forms.ModelForm):
"""
    Update Session form.
Field enhancements:
* time uses forms.TimeField with format '%H:%M'
* sessionInvoiceType uses forms.TypedChoiceField choices from Session.SESSION_INVOICE_CHOICES
* kelaInvoiceType uses forms.TypedChoiceField choices from Session.KELAINVOICABLE_CHOICES
* sessionType uses forms.TypedChoiceField choices from Session.SESSION_TYPE_CHOICES
"""
class Meta:
model = Session
fields = [
'date',
'time',
'sessionType',
'sessionInvoiceType',
'kelaInvoiceType',
'sessionprice',
'sessionpriceKelaRefund',
'sessionDone'
]
labels = {
'date': _("Tapaamispäivä"),
'time': _("Tapaamisaika"),
'sessionType': _("Tapaamisen tyyppi"),
'sessionInvoiceType': _("Tapaamisen laskutus"),
'kelaInvoiceType': _("Maksaako Kela osan asiakkaan kustannuksista?"),
'sessionprice': _("Tapaamisen hinta"),
'sessionpriceKelaRefund': _("Kelan maksama osuus tapaamisen hinnasta"),
'sessionDone': _("Onko tapaaminen pidetty?")
}
time = forms.TimeField(
label=Meta.labels['time'],
widget=forms.TimeInput(format='%H:%M'),
required=True
)
sessionInvoiceType = forms.TypedChoiceField(
label=Meta.labels['sessionInvoiceType'],
choices=Session.SESSION_INVOICE_CHOICES,
widget=forms.Select,
required=True
)
kelaInvoiceType = forms.TypedChoiceField(
label=Meta.labels['kelaInvoiceType'],
choices=Session.KELAINVOICABLE_CHOICES,
widget=forms.Select,
required=True
)
sessionType = forms.TypedChoiceField(
label=Meta.labels['sessionType'],
choices=Session.SESSION_TYPE_CHOICES,
widget=forms.Select,
required=True
)
class CompanyProfileUpdateForm(forms.ModelForm):
"""
    CompanyProfile update form.
Field enhancements:
* time uses forms.TimeField with format '%H:%M'
* serviceproviderType uses forms.TypedChoiceField choices from CompanyProfile.SERVICEPROVIDER_TYPE_CHOICES
* invoiceRefType uses forms.TypedChoiceField choices from CompanyProfile.INVOICEREF_TYPE_CHOICES
* taxAdvanceType uses forms.TypedChoiceField choices from CompanyProfile.TAX_ADVANCE_COLLECTION_TYPE_CHOICES
"""
class Meta:
model = CompanyProfile
fields = [
'companyName',
'firstName',
'additionalName',
'lastName',
'address',
'zipCode',
'city',
'country',
'telephone',
'email',
'vatId',
'iban',
'bic',
'serviceproviderType',
'invoiceRefType',
'taxAdvanceType'
]
labels = {
'companyName': _("Oman yrityksen nimi"),
'firstName': _("Etunimi"),
'additionalName': _("Muut etunimet"),
'lastName': _("Sukunimi"),
'address': _("Osoite"),
'zipCode': _("Postinumero"),
'city': _("Postitoimipaikka"),
'country': _("Maa"),
'telephone': _("Puhelin"),
'email': _("Sähköposti"),
'vatId': _("Y-tunnus/Henkilötunnus"),
'iban': _("Pankkitili (IBAN)"),
'bic': _("Pankkiyhteys (BIC)"),
'serviceproviderType': _("Palveluntarjoajatyyppi"),
'invoiceRefType': _("Kelan laskun viitetyyppi"),
'taxAdvanceType': _("Ennakinpidätysperuste")
}
serviceproviderType = forms.TypedChoiceField(
label=Meta.labels['serviceproviderType'],
choices=CompanyProfile.SERVICEPROVIDER_TYPE_CHOICES,
widget=forms.Select,
required=True
)
invoiceRefType = forms.TypedChoiceField(
label=Meta.labels['invoiceRefType'],
choices=CompanyProfile.INVOICEREF_TYPE_CHOICES,
widget=forms.Select,
required=True
)
taxAdvanceType = forms.TypedChoiceField(
label=Meta.labels['taxAdvanceType'],
choices=CompanyProfile.TAX_ADVANCE_COLLECTION_TYPE_CHOICES,
widget=forms.Select,
required=True
)
    )
| ylitormatech/terapialaskutus | therapyinvoicing/customers/forms.py | Python | bsd-3-clause | 7,087 | 0.001978 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common import exception
from heat.common.i18n import _
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class KeystoneRoleAssignmentMixin(object):
"""Implements role assignments between user/groups and project/domain.
heat_template_version: 2013-05-23
parameters:
... Group or User parameters
group_role:
type: string
description: role
group_role_domain:
type: string
description: group role domain
group_role_project:
type: string
description: group role project
resources:
admin_group:
type: OS::Keystone::Group OR OS::Keystone::User
properties:
... Group or User properties
roles:
- role: {get_param: group_role}
domain: {get_param: group_role_domain}
- role: {get_param: group_role}
project: {get_param: group_role_project}
"""
PROPERTIES = (
ROLES
) = (
'roles'
)
_ROLES_MAPPING_PROPERTIES = (
ROLE, DOMAIN, PROJECT
) = (
'role', 'domain', 'project'
)
mixin_properties_schema = {
ROLES: properties.Schema(
properties.Schema.LIST,
_('List of role assignments.'),
schema=properties.Schema(
properties.Schema.MAP,
_('Map between role with either project or domain.'),
schema={
ROLE: properties.Schema(
properties.Schema.STRING,
_('Keystone role'),
required=True,
constraints=([constraints.
CustomConstraint('keystone.role')])
),
PROJECT: properties.Schema(
properties.Schema.STRING,
_('Keystone project'),
constraints=([constraints.
CustomConstraint('keystone.project')])
),
DOMAIN: properties.Schema(
properties.Schema.STRING,
_('Keystone domain'),
constraints=([constraints.
CustomConstraint('keystone.domain')])
),
}
),
update_allowed=True
)
}
def _add_role_assignments_to_group(self, group_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
group=group_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
group=group_id
)
def _add_role_assignments_to_user(self, user_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
user=user_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.grant(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
user=user_id
)
def _remove_role_assignments_from_group(self, group_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
group=group_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
group=group_id
)
def _remove_role_assignments_from_user(self, user_id, role_assignments):
for role_assignment in self._normalize_to_id(role_assignments):
if role_assignment.get(self.PROJECT) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
project=role_assignment.get(self.PROJECT),
user=user_id
)
elif role_assignment.get(self.DOMAIN) is not None:
self.client().client.roles.revoke(
role=role_assignment.get(self.ROLE),
domain=role_assignment.get(self.DOMAIN),
user=user_id
)
def _normalize_to_id(self, role_assignment_prps):
role_assignments = []
if role_assignment_prps is None:
return role_assignments
for role_assignment in role_assignment_prps:
role = role_assignment.get(self.ROLE)
project = role_assignment.get(self.PROJECT)
domain = role_assignment.get(self.DOMAIN)
role_assignments.append({
self.ROLE: self.client_plugin().get_role_id(role),
self.PROJECT: (self.client_plugin().
get_project_id(project)) if project else None,
self.DOMAIN: (self.client_plugin().
get_domain_id(domain)) if domain else None
})
return role_assignments
def _find_differences(self, updated_prps, stored_prps):
updated_role_project_assignments = []
updated_role_domain_assignments = []
        # Split the updated properties into two sets of role assignments
        # (project, domain)
for role_assignment in updated_prps or []:
if role_assignment.get(self.PROJECT) is not None:
updated_role_project_assignments.append(
'%s:%s' % (
role_assignment[self.ROLE],
role_assignment[self.PROJECT]))
elif (role_assignment.get(self.DOMAIN)
is not None):
updated_role_domain_assignments.append(
'%s:%s' % (role_assignment[self.ROLE],
role_assignment[self.DOMAIN]))
stored_role_project_assignments = []
stored_role_domain_assignments = []
        # Split the stored properties into two sets of role assignments
        # (project, domain)
for role_assignment in (stored_prps or []):
if role_assignment.get(self.PROJECT) is not None:
stored_role_project_assignments.append(
'%s:%s' % (
role_assignment[self.ROLE],
role_assignment[self.PROJECT]))
elif (role_assignment.get(self.DOMAIN)
is not None):
stored_role_domain_assignments.append(
'%s:%s' % (role_assignment[self.ROLE],
role_assignment[self.DOMAIN]))
new_role_assignments = []
removed_role_assignments = []
# NOTE: finding the diff of list of strings is easier by using 'set'
# so properties are converted to string in above sections
# New items
for item in (set(updated_role_project_assignments) -
set(stored_role_project_assignments)):
new_role_assignments.append(
{self.ROLE: item[:item.find(':')],
self.PROJECT: item[item.find(':') + 1:]}
)
for item in (set(updated_role_domain_assignments) -
set(stored_role_domain_assignments)):
new_role_assignments.append(
{self.ROLE: item[:item.find(':')],
self.DOMAIN: item[item.find(':') + 1:]}
)
# Old items
for item in (set(stored_role_project_assignments) -
set(updated_role_project_assignments)):
removed_role_assignments.append(
{self.ROLE: item[:item.find(':')],
self.PROJECT: item[item.find(':') + 1:]}
)
for item in (set(stored_role_domain_assignments) -
set(updated_role_domain_assignments)):
removed_role_assignments.append(
{self.ROLE: item[:item.find(':')],
self.DOMAIN: item[item.find(':') + 1:]}
)
return new_role_assignments, removed_role_assignments
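    # Illustrative sketch (not part of the original code): with the
    # hypothetical inputs
    #
    #   updated_prps = [{'role': 'member', 'project': 'p1'}]
    #   stored_prps = [{'role': 'member', 'project': 'p2'}]
    #
    # _find_differences returns ([{'role': 'member', 'project': 'p1'}],
    # [{'role': 'member', 'project': 'p2'}]); update_assignment() grants the
    # first list and revokes the second.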
def create_assignment(self, user_id=None, group_id=None):
if self.properties.get(self.ROLES) is not None:
if user_id is not None:
self._add_role_assignments_to_user(
user_id,
self.properties.get(self.ROLES))
elif group_id is not None:
self._add_role_assignments_to_group(
group_id,
self.properties.get(self.ROLES))
def update_assignment(self, prop_diff, user_id=None, group_id=None):
(new_role_assignments,
removed_role_assignments) = self._find_differences(
prop_diff.get(self.ROLES),
self._stored_properties_data.get(self.ROLES))
if len(new_role_assignments) > 0:
if user_id is not None:
self._add_role_assignments_to_user(
user_id,
new_role_assignments)
elif group_id is not None:
self._add_role_assignments_to_group(
group_id,
new_role_assignments)
if len(removed_role_assignments) > 0:
if user_id is not None:
self._remove_role_assignments_from_user(
user_id,
removed_role_assignments)
elif group_id is not None:
self._remove_role_assignments_from_group(
group_id,
removed_role_assignments)
def delete_assignment(self, user_id=None, group_id=None):
if self._stored_properties_data.get(self.ROLES) is not None:
if user_id is not None:
self._remove_role_assignments_from_user(
user_id,
(self._stored_properties_data.
get(self.ROLES)))
elif group_id is not None:
self._remove_role_assignments_from_group(
group_id,
(self._stored_properties_data.
get(self.ROLES)))
def validate_assignment_properties(self):
if self.properties.get(self.ROLES) is not None:
for role_assignment in self.properties.get(self.ROLES):
project = role_assignment.get(self.PROJECT)
domain = role_assignment.get(self.DOMAIN)
if project is not None and domain is not None:
raise exception.ResourcePropertyConflict(self.PROJECT,
self.DOMAIN)
if project is None and domain is None:
msg = _('Either project or domain must be specified for'
' role %s') % role_assignment.get(self.ROLE)
raise exception.StackValidationFailed(message=msg)
class KeystoneUserRoleAssignment(resource.Resource,
KeystoneRoleAssignmentMixin):
"""Resource for granting roles to a user."""
support_status = support.SupportStatus(
version='5.0.0',
message=_('Supported versions: keystone v3'))
default_client_name = 'keystone'
PROPERTIES = (
USER,
) = (
'user',
)
properties_schema = {
USER: properties.Schema(
properties.Schema.STRING,
_('Name or id of keystone user.'),
required=True,
update_allowed=True,
constraints=[constraints.CustomConstraint('keystone.user')]
)
}
properties_schema.update(
KeystoneRoleAssignmentMixin.mixin_properties_schema)
def __init__(self, *args, **kwargs):
super(KeystoneUserRoleAssignment, self).__init__(*args, **kwargs)
@property
def user_id(self):
return (self.client_plugin().get_user_id(
self.properties.get(self.USER)))
def handle_create(self):
self.create_assignment(user_id=self.user_id)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self.update_assignment(user_id=self.user_id, prop_diff=prop_diff)
def handle_delete(self):
self.delete_assignment(user_id=self.user_id)
def validate(self):
super(KeystoneUserRoleAssignment, self).validate()
self.validate_assignment_properties()
class KeystoneGroupRoleAssignment(resource.Resource,
KeystoneRoleAssignmentMixin):
"""Resource for granting roles to a group."""
support_status = support.SupportStatus(
version='5.0.0',
message=_('Supported versions: keystone v3'))
default_client_name = 'keystone'
PROPERTIES = (
GROUP,
) = (
'group',
)
properties_schema = {
GROUP: properties.Schema(
properties.Schema.STRING,
_('Name or id of keystone group.'),
required=True,
update_allowed=True,
constraints=[constraints.CustomConstraint('keystone.group')]
)
}
properties_schema.update(
KeystoneRoleAssignmentMixin.mixin_properties_schema)
def __init__(self, *args, **kwargs):
super(KeystoneGroupRoleAssignment, self).__init__(*args, **kwargs)
@property
def group_id(self):
return (self.client_plugin().get_group_id(
self.properties.get(self.GROUP)))
def handle_create(self):
self.create_assignment(group_id=self.group_id)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
self.update_assignment(group_id=self.group_id, prop_diff=prop_diff)
def handle_delete(self):
self.delete_assignment(group_id=self.group_id)
def validate(self):
super(KeystoneGroupRoleAssignment, self).validate()
self.validate_assignment_properties()
def resource_mapping():
return {
'OS::Keystone::UserRoleAssignment': KeystoneUserRoleAssignment,
'OS::Keystone::GroupRoleAssignment': KeystoneGroupRoleAssignment
}
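# Example template snippet (illustrative only; the user, role and project
# names are hypothetical) showing how the mapped resource type is used:
#
#   resources:
#     user_role:
#       type: OS::Keystone::UserRoleAssignment
#       properties:
#         user: demo-user
#         roles:
#           - role: member
#             project: demo-project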
| pratikmallya/heat | heat/engine/resources/openstack/keystone/role_assignments.py | Python | apache-2.0 | 15,791 | 0 |
# Copyright (C) 2007-2018 David Aguilar and contributors
"""Miscellaneous Qt utility functions."""
from __future__ import division, absolute_import, unicode_literals
import os
from qtpy import compat
from qtpy import QtGui
from qtpy import QtCore
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from qtpy.QtCore import Signal
from . import core
from . import hotkeys
from . import icons
from . import utils
from .i18n import N_
from .compat import int_types
from .compat import ustr
from .models import prefs
from .widgets import defs
STRETCH = object()
SKIPPED = object()
def active_window():
"""Return the active window for the current application"""
return QtWidgets.QApplication.activeWindow()
def connect_action(action, fn):
"""Connect an action to a function"""
action.triggered[bool].connect(lambda x: fn())
def connect_action_bool(action, fn):
"""Connect a triggered(bool) action to a function"""
action.triggered[bool].connect(fn)
def connect_button(button, fn):
"""Connect a button to a function"""
# Some versions of Qt send the `bool` argument to the clicked callback,
# and some do not. The lambda consumes all callback-provided arguments.
button.clicked.connect(lambda *args, **kwargs: fn())
def connect_checkbox(widget, fn):
    """Connect a checkbox to a function taking a bool"""
    widget.clicked.connect(lambda *args, **kwargs: fn(get(widget)))
def connect_released(button, fn):
"""Connect a button to a function"""
button.released.connect(fn)
def button_action(button, action):
"""Make a button trigger an action"""
connect_button(button, action.trigger)
def connect_toggle(toggle, fn):
"""Connect a toggle button to a function"""
toggle.toggled.connect(fn)
def disconnect(signal):
"""Disconnect signal from all slots"""
try:
signal.disconnect()
except TypeError: # allow unconnected slots
pass
def get(widget):
"""Query a widget for its python value"""
if hasattr(widget, 'isChecked'):
value = widget.isChecked()
elif hasattr(widget, 'value'):
value = widget.value()
elif hasattr(widget, 'text'):
value = widget.text()
elif hasattr(widget, 'toPlainText'):
value = widget.toPlainText()
elif hasattr(widget, 'sizes'):
value = widget.sizes()
elif hasattr(widget, 'date'):
value = widget.date().toString(Qt.ISODate)
else:
value = None
return value
def hbox(margin, spacing, *items):
"""Create an HBoxLayout with the specified sizes and items"""
return box(QtWidgets.QHBoxLayout, margin, spacing, *items)
def vbox(margin, spacing, *items):
"""Create a VBoxLayout with the specified sizes and items"""
return box(QtWidgets.QVBoxLayout, margin, spacing, *items)
def buttongroup(*items):
"""Create a QButtonGroup for the specified items"""
group = QtWidgets.QButtonGroup()
for i in items:
group.addButton(i)
return group
def set_margin(layout, margin):
"""Set the content margins for a layout"""
layout.setContentsMargins(margin, margin, margin, margin)
def box(cls, margin, spacing, *items):
"""Create a QBoxLayout with the specified sizes and items"""
stretch = STRETCH
skipped = SKIPPED
layout = cls()
layout.setSpacing(spacing)
set_margin(layout, margin)
for i in items:
if isinstance(i, QtWidgets.QWidget):
layout.addWidget(i)
elif isinstance(i, (QtWidgets.QHBoxLayout, QtWidgets.QVBoxLayout,
QtWidgets.QFormLayout, QtWidgets.QLayout)):
layout.addLayout(i)
elif i is stretch:
layout.addStretch()
elif i is skipped:
continue
elif isinstance(i, int_types):
layout.addSpacing(i)
return layout
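# Usage sketch (illustrative; "content_widget", "close" and "ok" stand for
# widgets created elsewhere): compose nested layouts with the helpers above.
#
#   buttons = hbox(defs.no_margin, defs.spacing, STRETCH, close, ok)
#   main_layout = vbox(defs.margin, defs.spacing, content_widget, buttons)
#   dialog.setLayout(main_layout)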
def form(margin, spacing, *widgets):
"""Create a QFormLayout with the specified sizes and items"""
layout = QtWidgets.QFormLayout()
layout.setSpacing(spacing)
layout.setFieldGrowthPolicy(QtWidgets.QFormLayout.ExpandingFieldsGrow)
set_margin(layout, margin)
for idx, (name, widget) in enumerate(widgets):
if isinstance(name, (str, ustr)):
layout.addRow(name, widget)
else:
layout.setWidget(idx, QtWidgets.QFormLayout.LabelRole, name)
layout.setWidget(idx, QtWidgets.QFormLayout.FieldRole, widget)
return layout
def grid(margin, spacing, *widgets):
"""Create a QGridLayout with the specified sizes and items"""
layout = QtWidgets.QGridLayout()
layout.setSpacing(spacing)
set_margin(layout, margin)
for row in widgets:
item = row[0]
if isinstance(item, QtWidgets.QWidget):
layout.addWidget(*row)
elif isinstance(item, QtWidgets.QLayoutItem):
layout.addItem(*row)
return layout
def splitter(orientation, *widgets):
"""Create a spliter over the specified widgets
:param orientation: Qt.Horizontal or Qt.Vertical
"""
layout = QtWidgets.QSplitter()
layout.setOrientation(orientation)
layout.setHandleWidth(defs.handle_width)
layout.setChildrenCollapsible(True)
for idx, widget in enumerate(widgets):
layout.addWidget(widget)
layout.setStretchFactor(idx, 1)
# Workaround for Qt not setting the WA_Hover property for QSplitter
# Cf. https://bugreports.qt.io/browse/QTBUG-13768
layout.handle(1).setAttribute(Qt.WA_Hover)
return layout
def label(text=None, align=None, fmt=None, selectable=True):
"""Create a QLabel with the specified properties"""
widget = QtWidgets.QLabel()
if align is not None:
widget.setAlignment(align)
if fmt is not None:
widget.setTextFormat(fmt)
if selectable:
widget.setTextInteractionFlags(Qt.TextBrowserInteraction)
widget.setOpenExternalLinks(True)
if text:
widget.setText(text)
return widget
class ComboBox(QtWidgets.QComboBox):
"""Custom read-only combobox with a convenient API"""
def __init__(self, items=None, editable=False, parent=None, transform=None):
super(ComboBox, self).__init__(parent)
self.setEditable(editable)
self.transform = transform
self.item_data = []
if items:
self.addItems(items)
self.item_data.extend(items)
def set_index(self, idx):
idx = utils.clamp(idx, 0, self.count()-1)
self.setCurrentIndex(idx)
def add_item(self, text, data):
self.addItem(text)
self.item_data.append(data)
def current_data(self):
return self.item_data[self.currentIndex()]
def set_value(self, value):
if self.transform:
value = self.transform(value)
try:
index = self.item_data.index(value)
except ValueError:
index = 0
self.setCurrentIndex(index)
def combo(items, editable=False, parent=None):
"""Create a readonly (by default) combobox from a list of items"""
return ComboBox(editable=editable, items=items, parent=parent)
def combo_mapped(data, editable=False, transform=None, parent=None):
"""Create a readonly (by default) combobox from a list of items"""
widget = ComboBox(editable=editable, transform=transform, parent=parent)
for (k, v) in data:
widget.add_item(k, v)
return widget
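# Usage sketch (illustrative): map displayed labels to stored values.
#
#   widget = combo_mapped([('Default', None), ('Always', 'always')])
#   widget.set_value('always')  # selects the "Always" entry
#   value = widget.current_data()  # -> 'always'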
def textbrowser(text=None):
"""Create a QTextBrowser for the specified text"""
widget = QtWidgets.QTextBrowser()
widget.setOpenExternalLinks(True)
if text:
widget.setText(text)
return widget
def add_completer(widget, items):
"""Add simple completion to a widget"""
completer = QtWidgets.QCompleter(items, widget)
completer.setCaseSensitivity(Qt.CaseInsensitive)
completer.setCompletionMode(QtWidgets.QCompleter.InlineCompletion)
widget.setCompleter(completer)
def prompt(msg, title=None, text='', parent=None):
"""Presents the user with an input widget and returns the input."""
if title is None:
title = msg
if parent is None:
parent = active_window()
result = QtWidgets.QInputDialog.getText(
parent, title, msg, QtWidgets.QLineEdit.Normal, text)
return (result[0], result[1])
def prompt_n(msg, inputs):
"""Presents the user with N input widgets and returns the results"""
dialog = QtWidgets.QDialog(active_window())
dialog.setWindowModality(Qt.WindowModal)
dialog.setWindowTitle(msg)
long_value = msg
for k, v in inputs:
if len(k + v) > len(long_value):
long_value = k + v
metrics = QtGui.QFontMetrics(dialog.font())
min_width = metrics.width(long_value) + 100
if min_width > 720:
min_width = 720
dialog.setMinimumWidth(min_width)
ok_b = ok_button(msg, enabled=False)
close_b = close_button()
form_widgets = []
def get_values():
return [pair[1].text().strip() for pair in form_widgets]
for name, value in inputs:
lineedit = QtWidgets.QLineEdit()
# Enable the OK button only when all fields have been populated
lineedit.textChanged.connect(
lambda x: ok_b.setEnabled(all(get_values())))
if value:
lineedit.setText(value)
form_widgets.append((name, lineedit))
# layouts
form_layout = form(defs.no_margin, defs.button_spacing, *form_widgets)
button_layout = hbox(defs.no_margin, defs.button_spacing,
STRETCH, close_b, ok_b)
main_layout = vbox(defs.margin, defs.button_spacing,
form_layout, button_layout)
dialog.setLayout(main_layout)
# connections
connect_button(ok_b, dialog.accept)
connect_button(close_b, dialog.reject)
accepted = dialog.exec_() == QtWidgets.QDialog.Accepted
text = get_values()
ok = accepted and all(text)
return (ok, text)
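# Usage sketch (illustrative): prompt for two related values at once.
#
#   ok, (name, email) = prompt_n(N_('Set identity'),
#                                [(N_('Name'), ''), (N_('Email'), '')])
#   # "ok" is True only when the dialog was accepted with all fields set.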
class TreeWidgetItem(QtWidgets.QTreeWidgetItem):
TYPE = QtGui.QStandardItem.UserType + 101
def __init__(self, path, icon, deleted):
QtWidgets.QTreeWidgetItem.__init__(self)
self.path = path
self.deleted = deleted
self.setIcon(0, icons.from_name(icon))
self.setText(0, path)
def type(self):
return self.TYPE
def paths_from_indexes(model, indexes,
item_type=TreeWidgetItem.TYPE,
item_filter=None):
"""Return paths from a list of QStandardItemModel indexes"""
items = [model.itemFromIndex(i) for i in indexes]
return paths_from_items(items, item_type=item_type, item_filter=item_filter)
def _true_filter(_x):
return True
def paths_from_items(items,
item_type=TreeWidgetItem.TYPE,
item_filter=None):
"""Return a list of paths from a list of items"""
if item_filter is None:
item_filter = _true_filter
return [i.path for i in items
if i.type() == item_type and item_filter(i)]
def tree_selection(tree_item, items):
"""Returns an array of model items that correspond to the selected
QTreeWidgetItem children"""
selected = []
count = min(tree_item.childCount(), len(items))
for idx in range(count):
if tree_item.child(idx).isSelected():
selected.append(items[idx])
return selected
def tree_selection_items(tree_item):
"""Returns selected widget items"""
selected = []
for idx in range(tree_item.childCount()):
child = tree_item.child(idx)
if child.isSelected():
selected.append(child)
return selected
def selected_item(list_widget, items):
"""Returns the model item that corresponds to the selected QListWidget
row."""
widget_items = list_widget.selectedItems()
if not widget_items:
return None
widget_item = widget_items[0]
row = list_widget.row(widget_item)
if row < len(items):
item = items[row]
else:
item = None
return item
def selected_items(list_widget, items):
"""Returns an array of model items that correspond to the selected
QListWidget rows."""
item_count = len(items)
selected = []
for widget_item in list_widget.selectedItems():
row = list_widget.row(widget_item)
if row < item_count:
selected.append(items[row])
return selected
def open_file(title, directory=None):
"""Creates an Open File dialog and returns a filename."""
result = compat.getopenfilename(parent=active_window(),
caption=title,
basedir=directory)
return result[0]
def open_files(title, directory=None, filters=''):
"""Creates an Open File dialog and returns a list of filenames."""
result = compat.getopenfilenames(parent=active_window(),
caption=title,
basedir=directory,
filters=filters)
return result[0]
def opendir_dialog(caption, path):
"""Prompts for a directory path"""
options = (QtWidgets.QFileDialog.ShowDirsOnly |
QtWidgets.QFileDialog.DontResolveSymlinks)
return compat.getexistingdirectory(parent=active_window(),
caption=caption,
basedir=path,
options=options)
def save_as(filename, title='Save As...'):
"""Creates a Save File dialog and returns a filename."""
result = compat.getsavefilename(parent=active_window(),
caption=title,
basedir=filename)
return result[0]
def copy_path(filename, absolute=True):
"""Copy a filename to the clipboard"""
if filename is None:
return
if absolute:
filename = core.abspath(filename)
set_clipboard(filename)
def set_clipboard(text):
"""Sets the copy/paste buffer to text."""
if not text:
return
clipboard = QtWidgets.QApplication.clipboard()
clipboard.setText(text, QtGui.QClipboard.Clipboard)
clipboard.setText(text, QtGui.QClipboard.Selection)
persist_clipboard()
# pylint: disable=line-too-long
def persist_clipboard():
"""Persist the clipboard
X11 stores only a reference to the clipboard data.
Send a clipboard event to force a copy of the clipboard to occur.
This ensures that the clipboard is present after git-cola exits.
Otherwise, the reference is destroyed on exit.
C.f. https://stackoverflow.com/questions/2007103/how-can-i-disable-clear-of-clipboard-on-exit-of-pyqt4-application
""" # noqa
clipboard = QtWidgets.QApplication.clipboard()
event = QtCore.QEvent(QtCore.QEvent.Clipboard)
QtWidgets.QApplication.sendEvent(clipboard, event)
def add_action_bool(widget, text, fn, checked, *shortcuts):
tip = text
action = _add_action(widget, text, tip, fn,
connect_action_bool, *shortcuts)
action.setCheckable(True)
action.setChecked(checked)
return action
def add_action(widget, text, fn, *shortcuts):
tip = text
return _add_action(widget, text, tip, fn, connect_action, *shortcuts)
def add_action_with_status_tip(widget, text, tip, fn, *shortcuts):
return _add_action(widget, text, tip, fn, connect_action, *shortcuts)
def _add_action(widget, text, tip, fn, connect, *shortcuts):
action = QtWidgets.QAction(text, widget)
if hasattr(action, 'setIconVisibleInMenu'):
action.setIconVisibleInMenu(True)
if tip:
action.setStatusTip(tip)
connect(action, fn)
if shortcuts:
action.setShortcuts(shortcuts)
if hasattr(Qt, 'WidgetWithChildrenShortcut'):
action.setShortcutContext(Qt.WidgetWithChildrenShortcut)
widget.addAction(action)
return action
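# Usage sketch (illustrative; "refresh_fn" is hypothetical and
# hotkeys.REFRESH is assumed to be defined in the hotkeys module):
#
#   action = add_action(widget, N_('Refresh'), refresh_fn, hotkeys.REFRESH)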
def set_selected_item(widget, idx):
"""Sets a the currently selected item to the item at index idx."""
if isinstance(widget, QtWidgets.QTreeWidget):
item = widget.topLevelItem(idx)
if item:
item.setSelected(True)
widget.setCurrentItem(item)
def add_items(widget, items):
"""Adds items to a widget."""
for item in items:
if item is None:
continue
widget.addItem(item)
def set_items(widget, items):
"""Clear the existing widget contents and set the new items."""
widget.clear()
add_items(widget, items)
def create_treeitem(filename, staged=False, deleted=False, untracked=False):
"""Given a filename, return a TreeWidgetItem for a status widget
"staged", "deleted, and "untracked" control which icon is used.
"""
icon_name = icons.status(filename, deleted, staged, untracked)
return TreeWidgetItem(filename, icons.name_from_basename(icon_name),
deleted=deleted)
def add_close_action(widget):
"""Adds close action and shortcuts to a widget."""
return add_action(widget, N_('Close...'),
widget.close, hotkeys.CLOSE, hotkeys.QUIT)
def app():
"""Return the current application"""
return QtWidgets.QApplication.instance()
def desktop():
"""Return the desktop"""
return app().desktop()
def desktop_size():
desk = desktop()
rect = desk.screenGeometry(QtGui.QCursor().pos())
return (rect.width(), rect.height())
def center_on_screen(widget):
"""Move widget to the center of the default screen"""
width, height = desktop_size()
cx = width // 2
cy = height // 2
widget.move(cx - widget.width()//2, cy - widget.height()//2)
def default_size(parent, width, height, use_parent_height=True):
"""Return the parent's size, or the provided defaults"""
if parent is not None:
width = parent.width()
if use_parent_height:
height = parent.height()
return (width, height)
def default_monospace_font():
if utils.is_darwin():
family = 'Monaco'
else:
family = 'Monospace'
mfont = QtGui.QFont()
mfont.setFamily(family)
return mfont
def diff_font_str(context):
cfg = context.cfg
font_str = cfg.get(prefs.FONTDIFF)
if not font_str:
font_str = default_monospace_font().toString()
return font_str
def diff_font(context):
return font(diff_font_str(context))
def font(string):
qfont = QtGui.QFont()
qfont.fromString(string)
return qfont
def create_button(text='', layout=None, tooltip=None, icon=None,
enabled=True, default=False):
"""Create a button, set its title, and add it to the parent."""
button = QtWidgets.QPushButton()
button.setCursor(Qt.PointingHandCursor)
button.setFocusPolicy(Qt.NoFocus)
if text:
button.setText(' ' + text)
if icon is not None:
button.setIcon(icon)
button.setIconSize(QtCore.QSize(defs.small_icon, defs.small_icon))
if tooltip is not None:
button.setToolTip(tooltip)
if layout is not None:
layout.addWidget(button)
if not enabled:
button.setEnabled(False)
if default:
button.setDefault(True)
return button
def tool_button():
"""Create a flat border-less button"""
button = QtWidgets.QToolButton()
button.setPopupMode(QtWidgets.QToolButton.InstantPopup)
button.setCursor(Qt.PointingHandCursor)
button.setFocusPolicy(Qt.NoFocus)
button.setStyleSheet("""
/* No borders */
QToolButton {
border: 0;
border-style: none;
}
/* Hide the menu indicator */
QToolButton::menu-indicator {
image: none;
}
""")
return button
def create_action_button(tooltip=None, icon=None):
"""Create a small toolbutton for use in dock title widgets"""
button = tool_button()
if tooltip is not None:
button.setToolTip(tooltip)
if icon is not None:
button.setIcon(icon)
button.setIconSize(QtCore.QSize(defs.small_icon, defs.small_icon))
return button
def ok_button(text, default=True, enabled=True, icon=None):
if icon is None:
icon = icons.ok()
return create_button(text=text, icon=icon, default=default, enabled=enabled)
def close_button(text=None, icon=None):
text = text or N_('Close')
icon = icons.mkicon(icon, icons.close)
return create_button(text=text, icon=icon)
def edit_button(enabled=True, default=False):
return create_button(text=N_('Edit'), icon=icons.edit(),
enabled=enabled, default=default)
def refresh_button(enabled=True, default=False):
return create_button(text=N_('Refresh'), icon=icons.sync(),
enabled=enabled, default=default)
def checkbox(text='', tooltip='', checked=None):
"""Create a checkbox"""
return _checkbox(QtWidgets.QCheckBox, text, tooltip, checked)
def radio(text='', tooltip='', checked=None):
"""Create a radio button"""
return _checkbox(QtWidgets.QRadioButton, text, tooltip, checked)
def _checkbox(cls, text, tooltip, checked):
"""Create a widget and apply properties"""
widget = cls()
if text:
widget.setText(text)
if tooltip:
widget.setToolTip(tooltip)
if checked is not None:
widget.setChecked(checked)
return widget
class DockTitleBarWidget(QtWidgets.QFrame):
def __init__(self, parent, title, stretch=True):
QtWidgets.QFrame.__init__(self, parent)
self.setAutoFillBackground(True)
self.label = qlabel = QtWidgets.QLabel(title, self)
qfont = qlabel.font()
qfont.setBold(True)
qlabel.setFont(qfont)
qlabel.setCursor(Qt.OpenHandCursor)
self.close_button = create_action_button(
tooltip=N_('Close'), icon=icons.close())
self.toggle_button = create_action_button(
tooltip=N_('Detach'), icon=icons.external())
self.corner_layout = hbox(defs.no_margin, defs.spacing)
if stretch:
separator = STRETCH
else:
separator = SKIPPED
self.main_layout = hbox(defs.small_margin, defs.spacing,
qlabel, separator, self.corner_layout,
self.toggle_button, self.close_button)
self.setLayout(self.main_layout)
connect_button(self.toggle_button, self.toggle_floating)
connect_button(self.close_button, self.toggle_visibility)
def toggle_floating(self):
self.parent().setFloating(not self.parent().isFloating())
self.update_tooltips()
def toggle_visibility(self):
self.parent().toggleViewAction().trigger()
def set_title(self, title):
self.label.setText(title)
def add_corner_widget(self, widget):
self.corner_layout.addWidget(widget)
def update_tooltips(self):
if self.parent().isFloating():
tooltip = N_('Attach')
else:
tooltip = N_('Detach')
self.toggle_button.setToolTip(tooltip)
def create_dock(title, parent, stretch=True, widget=None, fn=None):
"""Create a dock widget and set it up accordingly."""
dock = QtWidgets.QDockWidget(parent)
dock.setWindowTitle(title)
dock.setObjectName(title)
titlebar = DockTitleBarWidget(dock, title, stretch=stretch)
dock.setTitleBarWidget(titlebar)
dock.setAutoFillBackground(True)
if hasattr(parent, 'dockwidgets'):
parent.dockwidgets.append(dock)
if fn:
widget = fn(dock)
assert isinstance(widget, QtWidgets.QFrame),\
"Docked widget has to be a QFrame"
if widget:
dock.setWidget(widget)
return dock
def hide_dock(widget):
widget.toggleViewAction().setChecked(False)
widget.hide()
def create_menu(title, parent):
"""Create a menu and set its title."""
qmenu = DebouncingMenu(title, parent)
return qmenu
class DebouncingMenu(QtWidgets.QMenu):
"""Menu that debounces mouse release action ie. stops it if occurred
right after menu creation.
Disables annoying behaviour when RMB is pressed to show menu, cursor is
moved accidentally 1px onto newly created menu and released causing to
execute menu action
"""
threshold_ms = 400
def __init__(self, title, parent):
QtWidgets.QMenu.__init__(self, title, parent)
self.created_at = utils.epoch_millis()
def mouseReleaseEvent(self, event):
threshold = DebouncingMenu.threshold_ms
if (utils.epoch_millis() - self.created_at) > threshold:
QtWidgets.QMenu.mouseReleaseEvent(self, event)
def add_menu(title, parent):
"""Create a menu and set its title."""
menu = create_menu(title, parent)
parent.addAction(menu.menuAction())
return menu
def create_toolbutton(text=None, layout=None, tooltip=None, icon=None):
button = tool_button()
if icon is not None:
button.setIcon(icon)
button.setIconSize(QtCore.QSize(defs.small_icon, defs.small_icon))
if text is not None:
button.setText(' ' + text)
button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon)
if tooltip is not None:
button.setToolTip(tooltip)
if layout is not None:
layout.addWidget(button)
return button
# pylint: disable=line-too-long
def mimedata_from_paths(context, paths):
"""Return mimedata with a list of absolute path URLs
The text/x-moz-list format is always included by Qt, and doing
mimedata.removeFormat('text/x-moz-url') has no effect.
C.f. http://www.qtcentre.org/threads/44643-Dragging-text-uri-list-Qt-inserts-garbage
gnome-terminal expects utf-16 encoded text, but other terminals,
e.g. terminator, prefer utf-8, so allow cola.dragencoding
to override the default.
""" # noqa
cfg = context.cfg
abspaths = [core.abspath(path) for path in paths]
urls = [QtCore.QUrl.fromLocalFile(path) for path in abspaths]
mimedata = QtCore.QMimeData()
mimedata.setUrls(urls)
paths_text = core.list2cmdline(abspaths)
encoding = cfg.get('cola.dragencoding', 'utf-16')
moz_text = core.encode(paths_text, encoding=encoding)
mimedata.setData('text/x-moz-url', moz_text)
return mimedata
def path_mimetypes():
return ['text/uri-list', 'text/x-moz-url']
class BlockSignals(object):
"""Context manager for blocking a signals on a widget"""
def __init__(self, *widgets):
self.widgets = widgets
self.values = {}
def __enter__(self):
for w in self.widgets:
self.values[w] = w.blockSignals(True)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
for w in self.widgets:
w.blockSignals(self.values[w])
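# Usage sketch (illustrative; the widget names are hypothetical): update
# widgets without re-entering their signal handlers.
#
#   with BlockSignals(checkbox_widget, combo_widget):
#       checkbox_widget.setChecked(True)
#       combo_widget.set_index(0)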
class Channel(QtCore.QObject):
finished = Signal(object)
result = Signal(object)
class Task(QtCore.QRunnable):
"""Disable auto-deletion to avoid gc issues
Python's garbage collector will try to double-free the task
once it's finished, so disable Qt's auto-deletion as a workaround.
"""
def __init__(self, parent):
QtCore.QRunnable.__init__(self)
self.channel = Channel(parent)
self.result = None
self.setAutoDelete(False)
def run(self):
self.result = self.task()
self.channel.result.emit(self.result)
self.channel.finished.emit(self)
# pylint: disable=no-self-use
def task(self):
return None
def connect(self, handler):
self.channel.result.connect(handler, type=Qt.QueuedConnection)
class SimpleTask(Task):
"""Run a simple callable as a task"""
def __init__(self, parent, fn, *args, **kwargs):
Task.__init__(self, parent)
self.fn = fn
self.args = args
self.kwargs = kwargs
def task(self):
return self.fn(*self.args, **self.kwargs)
class RunTask(QtCore.QObject):
"""Runs QRunnable instances and transfers control when they finish"""
def __init__(self, parent=None):
QtCore.QObject.__init__(self, parent)
self.tasks = []
self.task_details = {}
self.threadpool = QtCore.QThreadPool.globalInstance()
self.result_fn = None
def start(self, task, progress=None, finish=None, result=None):
"""Start the task and register a callback"""
self.result_fn = result
if progress is not None:
progress.show()
# prevents garbage collection bugs in certain PyQt4 versions
self.tasks.append(task)
task_id = id(task)
self.task_details[task_id] = (progress, finish, result)
task.channel.finished.connect(self.finish, type=Qt.QueuedConnection)
self.threadpool.start(task)
def finish(self, task):
task_id = id(task)
try:
self.tasks.remove(task)
except ValueError:
pass
try:
progress, finish, result = self.task_details[task_id]
del self.task_details[task_id]
except KeyError:
finish = progress = result = None
if progress is not None:
progress.hide()
if result is not None:
result(task.result)
if finish is not None:
finish(task)
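# Usage sketch (illustrative; "parent", "query_fn" and "show_result" are
# hypothetical): run a callable on the global thread pool and consume its
# result back on the main thread.
#
#   runtask = RunTask(parent=parent)
#   task = SimpleTask(parent, query_fn, 'origin')
#   runtask.start(task, result=show_result)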
# Syntax highlighting
def rgb(r, g, b):
color = QtGui.QColor()
color.setRgb(r, g, b)
return color
def rgba(r, g, b, a=255):
color = rgb(r, g, b)
color.setAlpha(a)
return color
def RGB(args):
return rgb(*args)
def rgb_css(color):
"""Convert a QColor into an rgb(int, int, int) CSS string"""
return 'rgb(%d, %d, %d)' % (color.red(), color.green(), color.blue())
def rgb_hex(color):
"""Convert a QColor into a hex aabbcc string"""
return '%02x%02x%02x' % (color.red(), color.green(), color.blue())
def hsl(h, s, l):
return QtGui.QColor.fromHslF(
utils.clamp(h, 0.0, 1.0),
utils.clamp(s, 0.0, 1.0),
utils.clamp(l, 0.0, 1.0)
)
def hsl_css(h, s, l):
return rgb_css(hsl(h, s, l))
def make_format(fg=None, bg=None, bold=False):
fmt = QtGui.QTextCharFormat()
if fg:
fmt.setForeground(fg)
if bg:
fmt.setBackground(bg)
if bold:
fmt.setFontWeight(QtGui.QFont.Bold)
return fmt
class ImageFormats(object):
def __init__(self):
# returns a list of QByteArray objects
formats_qba = QtGui.QImageReader.supportedImageFormats()
# portability: python3 data() returns bytes, python2 returns str
decode = core.decode
formats = [decode(x.data()) for x in formats_qba]
self.extensions = set(['.' + fmt for fmt in formats])
def ok(self, filename):
_, ext = os.path.splitext(filename)
return ext.lower() in self.extensions
| antoniodemora/git-cola | cola/qtutils.py | Python | gpl-2.0 | 30,765 | 0.000163 |
from cloudify import ctx
from cloudify.exceptions import NonRecoverableError
from cloudify.state import ctx_parameters as inputs
import subprocess
import os
import re
import sys
import time
import threading
import platform
from StringIO import StringIO
from cloudify_rest_client import CloudifyClient
from cloudify import utils
if 'MANAGER_REST_PROTOCOL' in os.environ and os.environ['MANAGER_REST_PROTOCOL'] == "https":
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port(), protocol='https', trust_all=True)
else:
client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port())
def convert_env_value_to_string(envDict):
for key, value in envDict.items():
envDict[str(key)] = str(envDict.pop(key))
def get_host(entity):
if entity.instance.relationships:
for relationship in entity.instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target
return None
def has_attribute_mapping(entity, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, entity.node.properties))
mapping_configuration = entity.node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def process_attribute_mapping(entity, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = entity.node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the
    # attribute with the mapped attribute name.
    # If it concerns TARGET, follow the relationship and retrieve the mapped
    # attribute name from the TARGET.
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(entity, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and entity.instance.relationships:
for relationship in entity.instance.relationships:
if mapping_configuration['parameters'][1] in relationship.type_hierarchy:
return data_retriever_function(relationship.target, mapping_configuration['parameters'][2])
return ""
def get_nested_attribute(entity, attribute_names):
deep_properties = get_attribute(entity, attribute_names[0])
attribute_names_iter = iter(attribute_names)
next(attribute_names_iter)
for attribute_name in attribute_names_iter:
if deep_properties is None:
return ""
else:
deep_properties = deep_properties.get(attribute_name, None)
return deep_properties
def _all_instances_get_nested_attribute(entity, attribute_names):
return None
def get_attribute(entity, attribute_name):
if has_attribute_mapping(entity, attribute_name):
# First check if any mapping exist for attribute
mapped_value = process_attribute_mapping(entity, attribute_name, get_attribute)
ctx.logger.info('Mapping exists for attribute {0} with value {1}'.format(attribute_name, mapped_value))
return mapped_value
    # No mapping exists; try to get the attribute directly from the entity
attribute_value = entity.instance.runtime_properties.get(attribute_name, None)
if attribute_value is not None:
ctx.logger.info('Found the attribute {0} with value {1} on the node {2}'.format(attribute_name, attribute_value, entity.node.id))
return attribute_value
# Attribute retrieval fails, fall back to property
property_value = entity.node.properties.get(attribute_name, None)
if property_value is not None:
return property_value
# Property retrieval fails, fall back to host instance
host = get_host(entity)
if host is not None:
        ctx.logger.info('Attribute {0} not found, going up to the parent node {1}'.format(attribute_name, host.node.id))
return get_attribute(host, attribute_name)
# Nothing is found
return ""
def _all_instances_get_attribute(entity, attribute_name):
result_map = {}
# get all instances data using cfy rest client
# we have to get the node using the rest client with node_instance.node_id
# then we will have the relationships
node = client.nodes.get(ctx.deployment.id, entity.node.id)
all_node_instances = client.node_instances.list(ctx.deployment.id, entity.node.id)
for node_instance in all_node_instances:
prop_value = __recursively_get_instance_data(node, node_instance, attribute_name)
if prop_value is not None:
ctx.logger.info('Found the property/attribute {0} with value {1} on the node {2} instance {3}'.format(attribute_name, prop_value, entity.node.id,
node_instance.id))
result_map[node_instance.id + '_'] = prop_value
return result_map
def get_property(entity, property_name):
# Try to get the property value on the node
property_value = entity.node.properties.get(property_name, None)
if property_value is not None:
ctx.logger.info('Found the property {0} with value {1} on the node {2}'.format(property_name, property_value, entity.node.id))
return property_value
# No property found on the node, fall back to the host
host = get_host(entity)
if host is not None:
        ctx.logger.info('Property {0} not found, going up to the parent node {1}'.format(property_name, host.node.id))
return get_property(host, property_name)
return ""
def get_instance_list(node_id):
result = ''
all_node_instances = client.node_instances.list(ctx.deployment.id, node_id)
for node_instance in all_node_instances:
if len(result) > 0:
result += ','
result += node_instance.id
return result
def get_host_node_name(instance):
for relationship in instance.relationships:
if 'cloudify.relationships.contained_in' in relationship.type_hierarchy:
return relationship.target.node.id
return None
def __get_relationship(node, target_name, relationship_type):
for relationship in node.relationships:
if relationship.get('target_id') == target_name and relationship_type in relationship.get('type_hierarchy'):
return relationship
return None
def __has_attribute_mapping(node, attribute_name):
    ctx.logger.info('Checking if a mapping exists for attribute {0} in {1}'.format(attribute_name, node.properties))
mapping_configuration = node.properties.get('_a4c_att_' + attribute_name, None)
if mapping_configuration is not None:
if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name:
return False
else:
return True
return False
def __process_attribute_mapping(node, node_instance, attribute_name, data_retriever_function):
# This is where attribute mapping is defined in the cloudify type
mapping_configuration = node.properties['_a4c_att_' + attribute_name]
ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration))
    # If the mapping configuration exists and concerns SELF, just get the
    # attribute with the mapped attribute name.
    # If it concerns TARGET, follow the relationship and retrieve the mapped
    # attribute name from the TARGET.
if mapping_configuration['parameters'][0] == 'SELF':
return data_retriever_function(node, node_instance, mapping_configuration['parameters'][1])
elif mapping_configuration['parameters'][0] == 'TARGET' and node_instance.relationships:
for rel in node_instance.relationships:
relationship = __get_relationship(node, rel.get('target_name'), rel.get('type'))
if mapping_configuration['parameters'][1] in relationship.get('type_hierarchy'):
target_instance = client.node_instances.get(rel.get('target_id'))
target_node = client.nodes.get(ctx.deployment.id, target_instance.node_id)
return data_retriever_function(target_node, target_instance, mapping_configuration['parameters'][2])
return None
def __recursively_get_instance_data(node, node_instance, attribute_name):
if __has_attribute_mapping(node, attribute_name):
return __process_attribute_mapping(node, node_instance, attribute_name, __recursively_get_instance_data)
attribute_value = node_instance.runtime_properties.get(attribute_name, None)
if attribute_value is not None:
return attribute_value
elif node_instance.relationships:
for rel in node_instance.relationships:
# on rel we have target_name, target_id (instanceId), type
relationship = __get_relationship(node, rel.get('target_name'), rel.get('type'))
if 'cloudify.relationships.contained_in' in relationship.get('type_hierarchy'):
parent_instance = client.node_instances.get(rel.get('target_id'))
parent_node = client.nodes.get(ctx.deployment.id, parent_instance.node_id)
return __recursively_get_instance_data(parent_node, parent_instance, attribute_name)
return None
else:
return None
def parse_output(output):
# by convention, the last output is the result of the operation
last_output = None
outputs = {}
    pattern = re.compile(r'EXPECTED_OUTPUT_(\w+)=(.*)')
for line in output.splitlines():
match = pattern.match(line)
if match is None:
last_output = line
else:
output_name = match.group(1)
output_value = match.group(2)
outputs[output_name] = output_value
return {'last_output': last_output, 'outputs': outputs}
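# Illustrative sketch: for script output such as
#
#   installing...
#   EXPECTED_OUTPUT_URL=http://10.0.0.1:80
#   done
#
# parse_output() returns
# {'last_output': 'done', 'outputs': {'URL': 'http://10.0.0.1:80'}}.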
def execute(script_path, process, outputNames, command_prefix=None, cwd=None):
os.chmod(script_path, 0755)
on_posix = 'posix' in sys.builtin_module_names
env = os.environ.copy()
process_env = process.get('env', {})
env.update(process_env)
if outputNames is not None:
env['EXPECTED_OUTPUTS'] = outputNames
if platform.system() == 'Windows':
wrapper_path = ctx.download_resource("scriptWrapper.bat")
else:
wrapper_path = ctx.download_resource("scriptWrapper.sh")
os.chmod(wrapper_path, 0755)
command = '{0} {1}'.format(wrapper_path, script_path)
else:
command = script_path
if command_prefix is not None:
command = "{0} {1}".format(command_prefix, command)
ctx.logger.info('Executing: {0} in env {1}'.format(command, env))
process = subprocess.Popen(command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env,
cwd=cwd,
bufsize=1,
close_fds=on_posix)
return_code = None
stdout_consumer = OutputConsumer(process.stdout)
stderr_consumer = OutputConsumer(process.stderr)
while True:
return_code = process.poll()
if return_code is not None:
break
time.sleep(0.1)
stdout_consumer.join()
stderr_consumer.join()
parsed_output = parse_output(stdout_consumer.buffer.getvalue())
if outputNames is not None:
outputNameList = outputNames.split(';')
for outputName in outputNameList:
            ctx.logger.info('Output name: {0} value: {1}'.format(outputName, parsed_output['outputs'].get(outputName, None)))
if return_code != 0:
error_message = "Script {0} encountered error with return code {1} and standard output {2}, error output {3}".format(command, return_code,
stdout_consumer.buffer.getvalue(),
stderr_consumer.buffer.getvalue())
error_message = str(unicode(error_message, errors='ignore'))
ctx.logger.error(error_message)
raise NonRecoverableError(error_message)
else:
ok_message = "Script {0} executed normally with standard output {1} and error output {2}".format(command, stdout_consumer.buffer.getvalue(),
stderr_consumer.buffer.getvalue())
ok_message = str(unicode(ok_message, errors='ignore'))
ctx.logger.info(ok_message)
return parsed_output
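# Usage sketch (illustrative; the script path and output names are
# hypothetical):
#
#   parsed = execute('/tmp/install.sh', {'env': {'TOMCAT_PORT': '80'}},
#                    'URL;PID')
#   url = parsed['outputs'].get('URL')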
class OutputConsumer(object):
def __init__(self, out):
self.out = out
self.buffer = StringIO()
self.consumer = threading.Thread(target=self.consume_output)
self.consumer.daemon = True
self.consumer.start()
def consume_output(self):
for line in iter(self.out.readline, b''):
self.buffer.write(line)
self.out.close()
def join(self):
self.consumer.join()
env_map = {}
env_map['NODE'] = ctx.node.id
env_map['INSTANCE'] = ctx.instance.id
env_map['INSTANCES'] = get_instance_list(ctx.node.id)
env_map['HOST'] = get_host_node_name(ctx.instance)
env_map['TOMCAT_HOME'] = r'/opt/tomcat'
env_map['TOMCAT_PORT'] = r'80'
env_map['TOMCAT_URL'] = r'http://mirrors.ircam.fr/pub/apache/tomcat/tomcat-8/v8.0.29/bin/apache-tomcat-8.0.29.tar.gz'
new_script_process = {'env': env_map}
ctx.logger.info('Operation is executed with inputs {0}'.format(inputs))
if inputs.get('process', None) is not None and inputs['process'].get('env', None) is not None:
ctx.logger.info('Operation is executed with environment variable {0}'.format(inputs['process']['env']))
new_script_process['env'].update(inputs['process']['env'])
operationOutputNames = None
convert_env_value_to_string(new_script_process['env'])
parsed_output = execute(ctx.download_resource('artifacts/tomcat-war-types/scripts/tomcat_install.sh'), new_script_process, operationOutputNames)
for k,v in parsed_output['outputs'].items():
ctx.logger.info('Output name: {0} value: {1}'.format(k, v))
ctx.instance.runtime_properties['_a4c_OO:tosca.interfaces.node.lifecycle.Standard:create:{0}'.format(k)] = v
ctx.instance.runtime_properties['server_url'] = r'http://' + get_attribute(ctx, 'public_ip_address') + r':' + r'80'
ctx.instance.update()
| victorkeophila/alien4cloud-cloudify3-provider | src/test/resources/outputs/blueprints/openstack/tomcat/wrapper/Tomcat/tosca.interfaces.node.lifecycle.Standard/create/artifacts/tomcat-war-types/scripts/_a4c_tomcat_install.py | Python | apache-2.0 | 14,972 | 0.004542 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.usage}, a command line option parsing library.
"""
from twisted.trial import unittest
from twisted.python import usage
class WellBehaved(usage.Options):
optParameters = [['long', 'w', 'default', 'and a docstring'],
['another', 'n', 'no docstring'],
['longonly', None, 'noshort'],
['shortless', None, 'except',
'this one got docstring'],
]
optFlags = [['aflag', 'f',
"""
flagallicious docstringness for this here
"""],
['flout', 'o'],
]
def opt_myflag(self):
self.opts['myflag'] = "PONY!"
def opt_myparam(self, value):
self.opts['myparam'] = "%s WITH A PONY!" % (value,)
class ParseCorrectnessTest(unittest.TestCase):
"""
Test Options.parseArgs for correct values under good conditions.
"""
def setUp(self):
"""
Instantiate and parseOptions a well-behaved Options class.
"""
self.niceArgV = ("--long Alpha -n Beta "
"--shortless Gamma -f --myflag "
"--myparam Tofu").split()
self.nice = WellBehaved()
self.nice.parseOptions(self.niceArgV)
def test_checkParameters(self):
"""
Checking that parameters have correct values.
"""
self.assertEqual(self.nice.opts['long'], "Alpha")
self.assertEqual(self.nice.opts['another'], "Beta")
self.assertEqual(self.nice.opts['longonly'], "noshort")
self.assertEqual(self.nice.opts['shortless'], "Gamma")
def test_checkFlags(self):
"""
Checking that flags have correct values.
"""
self.assertEqual(self.nice.opts['aflag'], 1)
self.assertEqual(self.nice.opts['flout'], 0)
def test_checkCustoms(self):
"""
Checking that custom flags and parameters have correct values.
"""
self.assertEqual(self.nice.opts['myflag'], "PONY!")
self.assertEqual(self.nice.opts['myparam'], "Tofu WITH A PONY!")
class TypedOptions(usage.Options):
optParameters = [
['fooint', None, 392, 'Foo int', int],
['foofloat', None, 4.23, 'Foo float', float],
['eggint', None, None, 'Egg int without default', int],
['eggfloat', None, None, 'Egg float without default', float],
]
def opt_under_score(self, value):
"""
This option has an underscore in its name to exercise the _ to -
translation.
"""
self.underscoreValue = value
opt_u = opt_under_score
class TypedTestCase(unittest.TestCase):
"""
Test Options.parseArgs for options with forced types.
"""
def setUp(self):
self.usage = TypedOptions()
def test_defaultValues(self):
"""
Test parsing of default values.
"""
argV = []
self.usage.parseOptions(argV)
self.assertEqual(self.usage.opts['fooint'], 392)
self.assert_(isinstance(self.usage.opts['fooint'], int))
self.assertEqual(self.usage.opts['foofloat'], 4.23)
self.assert_(isinstance(self.usage.opts['foofloat'], float))
self.assertEqual(self.usage.opts['eggint'], None)
self.assertEqual(self.usage.opts['eggfloat'], None)
def test_parsingValues(self):
"""
Test basic parsing of int and float values.
"""
argV = ("--fooint 912 --foofloat -823.1 "
"--eggint 32 --eggfloat 21").split()
self.usage.parseOptions(argV)
self.assertEqual(self.usage.opts['fooint'], 912)
self.assert_(isinstance(self.usage.opts['fooint'], int))
self.assertEqual(self.usage.opts['foofloat'], -823.1)
self.assert_(isinstance(self.usage.opts['foofloat'], float))
self.assertEqual(self.usage.opts['eggint'], 32)
self.assert_(isinstance(self.usage.opts['eggint'], int))
self.assertEqual(self.usage.opts['eggfloat'], 21.)
self.assert_(isinstance(self.usage.opts['eggfloat'], float))
def test_underscoreOption(self):
"""
A dash in an option name is translated to an underscore before being
dispatched to a handler.
"""
self.usage.parseOptions(['--under-score', 'foo'])
self.assertEqual(self.usage.underscoreValue, 'foo')
def test_underscoreOptionAlias(self):
"""
An option name with a dash in it can have an alias.
"""
self.usage.parseOptions(['-u', 'bar'])
self.assertEqual(self.usage.underscoreValue, 'bar')
def test_invalidValues(self):
"""
Check that passing wrong values raises an error.
"""
argV = "--fooint egg".split()
self.assertRaises(usage.UsageError, self.usage.parseOptions, argV)
class WrongTypedOptions(usage.Options):
optParameters = [
['barwrong', None, None, 'Bar with wrong coerce', 'he']
]
class WeirdCallableOptions(usage.Options):
def _bar(value):
raise RuntimeError("Ouch")
def _foo(value):
raise ValueError("Yay")
optParameters = [
['barwrong', None, None, 'Bar with strange callable', _bar],
['foowrong', None, None, 'Foo with strange callable', _foo]
]
class WrongTypedTestCase(unittest.TestCase):
"""
Test Options.parseArgs for wrong coerce options.
"""
def test_nonCallable(self):
"""
Check that using a non callable type fails.
"""
us = WrongTypedOptions()
argV = "--barwrong egg".split()
self.assertRaises(TypeError, us.parseOptions, argV)
def test_notCalledInDefault(self):
"""
Test that the coerce functions are not called if no values are
provided.
"""
us = WeirdCallableOptions()
argV = []
us.parseOptions(argV)
def test_weirdCallable(self):
"""
Test what happens when coerce functions raise errors.
"""
us = WeirdCallableOptions()
argV = "--foowrong blah".split()
# ValueError is swallowed as UsageError
e = self.assertRaises(usage.UsageError, us.parseOptions, argV)
self.assertEqual(str(e), "Parameter type enforcement failed: Yay")
us = WeirdCallableOptions()
argV = "--barwrong blah".split()
# RuntimeError is not swallowed
self.assertRaises(RuntimeError, us.parseOptions, argV)
class OutputTest(unittest.TestCase):
def test_uppercasing(self):
"""
Error output case adjustment does not mangle options
"""
opt = WellBehaved()
e = self.assertRaises(usage.UsageError,
opt.parseOptions, ['-Z'])
self.assertEqual(str(e), 'option -Z not recognized')
class InquisitionOptions(usage.Options):
optFlags = [
('expect', 'e'),
]
optParameters = [
('torture-device', 't',
'comfy-chair',
'set preferred torture device'),
]
class HolyQuestOptions(usage.Options):
optFlags = [('horseback', 'h',
'use a horse'),
('for-grail', 'g'),
]
class SubCommandOptions(usage.Options):
optFlags = [('europian-swallow', None,
'set default swallow type to Europian'),
]
subCommands = [
('inquisition', 'inquest', InquisitionOptions,
'Perform an inquisition'),
('holyquest', 'quest', HolyQuestOptions,
'Embark upon a holy quest'),
]
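# Illustrative command lines accepted by SubCommandOptions (the program
# name "quest" is hypothetical):
#
#   quest --europian-swallow inquisition --expect
#   quest holyquest --for-grail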
class SubCommandTest(unittest.TestCase):
def test_simpleSubcommand(self):
o = SubCommandOptions()
o.parseOptions(['--europian-swallow', 'inquisition'])
self.assertEqual(o['europian-swallow'], True)
self.assertEqual(o.subCommand, 'inquisition')
self.failUnless(isinstance(o.subOptions, InquisitionOptions))
self.assertEqual(o.subOptions['expect'], False)
self.assertEqual(o.subOptions['torture-device'], 'comfy-chair')
def test_subcommandWithFlagsAndOptions(self):
o = SubCommandOptions()
o.parseOptions(['inquisition', '--expect', '--torture-device=feather'])
self.assertEqual(o['europian-swallow'], False)
self.assertEqual(o.subCommand, 'inquisition')
self.failUnless(isinstance(o.subOptions, InquisitionOptions))
self.assertEqual(o.subOptions['expect'], True)
self.assertEqual(o.subOptions['torture-device'], 'feather')
def test_subcommandAliasWithFlagsAndOptions(self):
o = SubCommandOptions()
o.parseOptions(['inquest', '--expect', '--torture-device=feather'])
self.assertEqual(o['europian-swallow'], False)
self.assertEqual(o.subCommand, 'inquisition')
self.failUnless(isinstance(o.subOptions, InquisitionOptions))
self.assertEqual(o.subOptions['expect'], True)
self.assertEqual(o.subOptions['torture-device'], 'feather')
def test_anotherSubcommandWithFlagsAndOptions(self):
o = SubCommandOptions()
o.parseOptions(['holyquest', '--for-grail'])
self.assertEqual(o['europian-swallow'], False)
self.assertEqual(o.subCommand, 'holyquest')
self.failUnless(isinstance(o.subOptions, HolyQuestOptions))
self.assertEqual(o.subOptions['horseback'], False)
self.assertEqual(o.subOptions['for-grail'], True)
def test_noSubcommand(self):
o = SubCommandOptions()
o.parseOptions(['--europian-swallow'])
self.assertEqual(o['europian-swallow'], True)
self.assertEqual(o.subCommand, None)
self.failIf(hasattr(o, 'subOptions'))
def test_defaultSubcommand(self):
o = SubCommandOptions()
o.defaultSubCommand = 'inquest'
o.parseOptions(['--europian-swallow'])
self.assertEqual(o['europian-swallow'], True)
self.assertEqual(o.subCommand, 'inquisition')
self.failUnless(isinstance(o.subOptions, InquisitionOptions))
self.assertEqual(o.subOptions['expect'], False)
self.assertEqual(o.subOptions['torture-device'], 'comfy-chair')
def test_subCommandParseOptionsHasParent(self):
class SubOpt(usage.Options):
def parseOptions(self, *a, **kw):
self.sawParent = self.parent
usage.Options.parseOptions(self, *a, **kw)
class Opt(usage.Options):
subCommands = [
('foo', 'f', SubOpt, 'bar'),
]
o = Opt()
o.parseOptions(['foo'])
self.failUnless(hasattr(o.subOptions, 'sawParent'))
self.assertEqual(o.subOptions.sawParent , o)
def test_subCommandInTwoPlaces(self):
"""
The .parent pointer is correct even when the same Options class is
used twice.
"""
class SubOpt(usage.Options):
pass
class OptFoo(usage.Options):
subCommands = [
('foo', 'f', SubOpt, 'quux'),
]
class OptBar(usage.Options):
subCommands = [
('bar', 'b', SubOpt, 'quux'),
]
oFoo = OptFoo()
oFoo.parseOptions(['foo'])
oBar=OptBar()
oBar.parseOptions(['bar'])
self.failUnless(hasattr(oFoo.subOptions, 'parent'))
self.failUnless(hasattr(oBar.subOptions, 'parent'))
self.failUnlessIdentical(oFoo.subOptions.parent, oFoo)
self.failUnlessIdentical(oBar.subOptions.parent, oBar)
class HelpStringTest(unittest.TestCase):
def setUp(self):
"""
Instantiate a well-behaved Options class.
"""
self.niceArgV = ("--long Alpha -n Beta "
"--shortless Gamma -f --myflag "
"--myparam Tofu").split()
self.nice = WellBehaved()
def test_noGoBoom(self):
"""
__str__ shouldn't go boom.
"""
try:
self.nice.__str__()
except Exception, e:
self.fail(e)
def test_whitespaceStripFlagsAndParameters(self):
"""
Extra whitespace in flag and parameters docs is stripped.
"""
        # We test this by making sure aflag and its help string are on the
        # same line.
lines = [s for s in str(self.nice).splitlines() if s.find("aflag")>=0]
self.failUnless(len(lines) > 0)
self.failUnless(lines[0].find("flagallicious") >= 0)
class PortCoerceTestCase(unittest.TestCase):
"""
Test the behavior of L{usage.portCoerce}.
"""
def test_validCoerce(self):
"""
Test the answers with valid input.
"""
self.assertEqual(0, usage.portCoerce("0"))
self.assertEqual(3210, usage.portCoerce("3210"))
self.assertEqual(65535, usage.portCoerce("65535"))
def test_errorCoerce(self):
"""
Test error path.
"""
self.assertRaises(ValueError, usage.portCoerce, "")
self.assertRaises(ValueError, usage.portCoerce, "-21")
self.assertRaises(ValueError, usage.portCoerce, "212189")
self.assertRaises(ValueError, usage.portCoerce, "foo")
class ZshCompleterTestCase(unittest.TestCase):
"""
Test the behavior of the various L{twisted.usage.Completer} classes
for producing output usable by zsh tab-completion system.
"""
def test_completer(self):
"""
Completer produces zsh shell-code that produces no completion matches.
"""
c = usage.Completer()
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:')
c = usage.Completer(descr='some action', repeat=True)
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, '*:some action:')
def test_files(self):
"""
CompleteFiles produces zsh shell-code that completes file names
according to a glob.
"""
c = usage.CompleteFiles()
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option (*):_files -g "*"')
c = usage.CompleteFiles('*.py')
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option (*.py):_files -g "*.py"')
c = usage.CompleteFiles('*.py', descr="some action", repeat=True)
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, '*:some action (*.py):_files -g "*.py"')
def test_dirs(self):
"""
CompleteDirs produces zsh shell-code that completes directory names.
"""
c = usage.CompleteDirs()
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:_directories')
c = usage.CompleteDirs(descr="some action", repeat=True)
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, '*:some action:_directories')
def test_list(self):
"""
CompleteList produces zsh shell-code that completes words from a fixed
list of possibilities.
"""
c = usage.CompleteList('ABC')
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:(A B C)')
c = usage.CompleteList(['1', '2', '3'])
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:(1 2 3)')
c = usage.CompleteList(['1', '2', '3'], descr='some action',
repeat=True)
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, '*:some action:(1 2 3)')
def test_multiList(self):
"""
CompleteMultiList produces zsh shell-code that completes multiple
comma-separated words from a fixed list of possibilities.
"""
c = usage.CompleteMultiList('ABC')
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:_values -s , \'some-option\' A B C')
c = usage.CompleteMultiList(['1','2','3'])
got = c._shellCode('some-option', usage._ZSH)
self.assertEqual(got, ':some-option:_values -s , \'some-option\' 1 2 3')
c = usage.CompleteMultiList(['1','2','3'], descr='some action',
repeat=True)
got = c._shellCode('some-option', usage._ZSH)
expected = '*:some action:_values -s , \'some action\' 1 2 3'
self.assertEqual(got, expected)
def test_usernames(self):
"""
CompleteUsernames produces zsh shell-code that completes system
usernames.
"""
c = usage.CompleteUsernames()
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, ':some-option:_users')
c = usage.CompleteUsernames(descr='some action', repeat=True)
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, '*:some action:_users')
def test_groups(self):
"""
CompleteGroups produces zsh shell-code that completes system group
names.
"""
c = usage.CompleteGroups()
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, ':group:_groups')
c = usage.CompleteGroups(descr='some action', repeat=True)
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, '*:some action:_groups')
def test_hostnames(self):
"""
CompleteHostnames produces zsh shell-code that completes hostnames.
"""
c = usage.CompleteHostnames()
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, ':some-option:_hosts')
c = usage.CompleteHostnames(descr='some action', repeat=True)
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, '*:some action:_hosts')
def test_userAtHost(self):
"""
CompleteUserAtHost produces zsh shell-code that completes hostnames or
a word of the form <username>@<hostname>.
"""
c = usage.CompleteUserAtHost()
out = c._shellCode('some-option', usage._ZSH)
self.assertTrue(out.startswith(':host | user@host:'))
c = usage.CompleteUserAtHost(descr='some action', repeat=True)
out = c._shellCode('some-option', usage._ZSH)
self.assertTrue(out.startswith('*:some action:'))
def test_netInterfaces(self):
"""
CompleteNetInterfaces produces zsh shell-code that completes system
network interface names.
"""
c = usage.CompleteNetInterfaces()
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, ':some-option:_net_interfaces')
c = usage.CompleteNetInterfaces(descr='some action', repeat=True)
out = c._shellCode('some-option', usage._ZSH)
self.assertEqual(out, '*:some action:_net_interfaces')
class CompleterNotImplementedTestCase(unittest.TestCase):
"""
Using an unknown shell constant with the various Completer() classes
should raise NotImplementedError
"""
def test_unknownShell(self):
"""
Using an unknown shellType should raise NotImplementedError
"""
classes = [usage.Completer, usage.CompleteFiles,
usage.CompleteDirs, usage.CompleteList,
usage.CompleteMultiList, usage.CompleteUsernames,
usage.CompleteGroups, usage.CompleteHostnames,
usage.CompleteUserAtHost, usage.CompleteNetInterfaces]
for cls in classes:
try:
action = cls()
            except TypeError:
action = cls(None)
self.assertRaises(NotImplementedError, action._shellCode,
None, "bad_shell_type")
| nlloyd/SubliminalCollaborator | libs/twisted/test/test_usage.py | Python | apache-2.0 | 19,879 | 0.001811 |
# Authors: Gael Varoquaux <[email protected]>
# Justin Vincent
# Lars Buitinck
# License: BSD 3 clause
import pickle
import numpy as np
import pytest
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.fixes import MaskedArray
from sklearn.utils.fixes import _joblib_parallel_args
from sklearn.utils.fixes import _object_dtype_isnan
def test_masked_array_obj_dtype_pickleable():
marr = MaskedArray([1, None, 'a'], dtype=object)
for mask in (True, False, [0, 1, 0]):
marr.mask = mask
marr_pickled = pickle.loads(pickle.dumps(marr))
assert_array_equal(marr.data, marr_pickled.data)
assert_array_equal(marr.mask, marr_pickled.mask)
@pytest.mark.parametrize('joblib_version', ('0.11', '0.12.0'))
def test_joblib_parallel_args(monkeypatch, joblib_version):
import joblib
monkeypatch.setattr(joblib, '__version__', joblib_version)
if joblib_version == '0.12.0':
# arguments are simply passed through
assert _joblib_parallel_args(prefer='threads') == {'prefer': 'threads'}
assert _joblib_parallel_args(prefer='processes', require=None) == {
'prefer': 'processes', 'require': None}
assert _joblib_parallel_args(non_existing=1) == {'non_existing': 1}
elif joblib_version == '0.11':
# arguments are mapped to the corresponding backend
assert _joblib_parallel_args(prefer='threads') == {
'backend': 'threading'}
assert _joblib_parallel_args(prefer='processes') == {
'backend': 'multiprocessing'}
with pytest.raises(ValueError):
_joblib_parallel_args(prefer='invalid')
assert _joblib_parallel_args(
prefer='processes', require='sharedmem') == {
'backend': 'threading'}
with pytest.raises(ValueError):
_joblib_parallel_args(require='invalid')
with pytest.raises(NotImplementedError):
_joblib_parallel_args(verbose=True)
else:
raise ValueError
@pytest.mark.parametrize("dtype, val", ([object, 1],
[object, "a"],
[float, 1]))
def test_object_dtype_isnan(dtype, val):
X = np.array([[val, np.nan],
[np.nan, val]], dtype=dtype)
expected_mask = np.array([[False, True],
[True, False]])
mask = _object_dtype_isnan(X)
assert_array_equal(mask, expected_mask)
| chrsrds/scikit-learn | sklearn/utils/tests/test_fixes.py | Python | bsd-3-clause | 2,534 | 0 |
# -*- test-case-name: txdav.who.test.test_augment -*-
##
# Copyright (c) 2013-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Augmenting Directory Service
"""
__all__ = [
"AugmentedDirectoryService",
]
import time
from zope.interface import implementer
from twisted.internet.defer import inlineCallbacks, returnValue, succeed
from twistedcaldav.directory.augment import AugmentRecord
from twext.python.log import Logger
from twext.who.directory import DirectoryRecord
from twext.who.directory import DirectoryService as BaseDirectoryService
from twext.who.idirectory import (
IDirectoryService, RecordType, FieldName as BaseFieldName, NotAllowedError
)
from twext.who.util import ConstantsContainer
from txdav.common.idirectoryservice import IStoreDirectoryService
from txdav.who.directory import (
CalendarDirectoryRecordMixin, CalendarDirectoryServiceMixin,
)
from txdav.who.idirectory import (
AutoScheduleMode, FieldName, RecordType as CalRecordType
)
log = Logger()
def timed(f):
"""
A decorator which keeps track of the wrapped function's call count and
total duration
"""
def recordTiming(result, key, startTime):
"""
Figures out how much time to add to the total time spent within the
method identified by key and stores that in the timings dict.
        @param result: the result of the wrapped method
        @param key: the method name
        @type key: C{str}
        @param startTime: the start time of the call in seconds
        @type startTime: C{float}
"""
AugmentedDirectoryService._addTiming(key, time.time() - startTime)
return result
def timingWrapper(self, *args, **kwds):
"""
Records the start time of the call and the method's name
"""
startTime = time.time()
d = f(self, *args, **kwds)
d.addBoth(recordTiming, f.func_name, startTime)
return d
return timingWrapper
@implementer(IDirectoryService, IStoreDirectoryService)
class AugmentedDirectoryService(
BaseDirectoryService, CalendarDirectoryServiceMixin
):
"""
Augmented directory service.
This is a directory service that wraps an L{IDirectoryService} and augments
directory records with additional or modified fields.
"""
fieldName = ConstantsContainer((
BaseFieldName,
FieldName,
))
_timings = {}
def __init__(self, directory, store, augmentDB):
BaseDirectoryService.__init__(self, directory.realmName)
self._directory = directory
self._store = store
self._augmentDB = augmentDB
# An LDAP DS has extra info to expose via the dashboard
# This is assigned in buildDirectory()
self._ldapDS = None
@classmethod
def _addTiming(cls, key, duration):
if key not in cls._timings:
cls._timings[key] = (0, 0.0)
count, timeSpent = cls._timings[key]
count += 1
timeSpent += duration
cls._timings[key] = (count, timeSpent)
def flush(self):
return self._directory.flush()
def stats(self):
results = {}
results.update(self._timings)
# An LDAP DS has extra info to expose via the dashboard
if self._ldapDS is not None:
results.update(self._ldapDS.poolStats)
return succeed(results)
@property
def recordType(self):
# Defer to the directory service we're augmenting
return self._directory.recordType
def recordTypes(self):
# Defer to the directory service we're augmenting
return self._directory.recordTypes()
@inlineCallbacks
def recordsFromExpression(
self, expression, recordTypes=None,
limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsFromExpression(
expression, recordTypes=recordTypes,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@inlineCallbacks
def recordsWithFieldValue(
self, fieldName, value, limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsWithFieldValue(
fieldName, value,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
@inlineCallbacks
def recordWithUID(self, uid, timeoutSeconds=None):
# MOVE2WHO, REMOVE THIS:
if not isinstance(uid, unicode):
# log.warn("Need to change uid to unicode")
uid = uid.decode("utf-8")
record = yield self._directory.recordWithUID(
uid, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordWithGUID(self, guid, timeoutSeconds=None):
record = yield self._directory.recordWithGUID(
guid, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordsWithRecordType(
self, recordType, limitResults=None, timeoutSeconds=None
):
records = yield self._directory.recordsWithRecordType(
recordType, limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
@inlineCallbacks
def recordWithShortName(self, recordType, shortName, timeoutSeconds=None):
# MOVE2WHO, REMOVE THIS:
if not isinstance(shortName, unicode):
# log.warn("Need to change shortName to unicode")
shortName = shortName.decode("utf-8")
record = yield self._directory.recordWithShortName(
recordType, shortName, timeoutSeconds=timeoutSeconds
)
record = yield self._augment(record)
returnValue(record)
@timed
@inlineCallbacks
def recordsWithEmailAddress(
self, emailAddress, limitResults=None, timeoutSeconds=None
):
# MOVE2WHO, REMOVE THIS:
if not isinstance(emailAddress, unicode):
# log.warn("Need to change emailAddress to unicode")
emailAddress = emailAddress.decode("utf-8")
records = yield self._directory.recordsWithEmailAddress(
emailAddress,
limitResults=limitResults, timeoutSeconds=timeoutSeconds
)
augmented = []
for record in records:
record = yield self._augment(record)
augmented.append(record)
returnValue(augmented)
@timed
def recordWithCalendarUserAddress(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordWithCalendarUserAddress(
self, *args, **kwds
)
@timed
def recordsMatchingTokens(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordsMatchingTokens(
self, *args, **kwds
)
@timed
def recordsMatchingFields(self, *args, **kwds):
return CalendarDirectoryServiceMixin.recordsMatchingFields(
self, *args, **kwds
)
@timed
@inlineCallbacks
def updateRecords(self, records, create=False):
"""
Pull out the augmented fields from each record, apply those to the
augments database, then update the base records.
"""
baseRecords = []
augmentRecords = []
for record in records:
# Split out the base fields from the augment fields
baseFields, augmentFields = self._splitFields(record)
# Ignore groups for now
if augmentFields and record.recordType != RecordType.group:
# Create an AugmentRecord
autoScheduleMode = {
AutoScheduleMode.none: "none",
AutoScheduleMode.accept: "accept-always",
AutoScheduleMode.decline: "decline-always",
AutoScheduleMode.acceptIfFree: "accept-if-free",
AutoScheduleMode.declineIfBusy: "decline-if-busy",
AutoScheduleMode.acceptIfFreeDeclineIfBusy: "automatic",
}.get(augmentFields.get(FieldName.autoScheduleMode, None), None)
kwargs = {
"uid": record.uid,
"autoScheduleMode": autoScheduleMode,
}
if FieldName.hasCalendars in augmentFields:
kwargs["enabledForCalendaring"] = augmentFields[FieldName.hasCalendars]
if FieldName.hasContacts in augmentFields:
kwargs["enabledForAddressBooks"] = augmentFields[FieldName.hasContacts]
if FieldName.loginAllowed in augmentFields:
kwargs["enabledForLogin"] = augmentFields[FieldName.loginAllowed]
if FieldName.autoAcceptGroup in augmentFields:
kwargs["autoAcceptGroup"] = augmentFields[FieldName.autoAcceptGroup]
if FieldName.serviceNodeUID in augmentFields:
kwargs["serverID"] = augmentFields[FieldName.serviceNodeUID]
augmentRecord = AugmentRecord(**kwargs)
augmentRecords.append(augmentRecord)
# Create new base records:
baseRecords.append(DirectoryRecord(self._directory, record._baseRecord.fields if hasattr(record, "_baseRecord") else baseFields))
# Apply the augment records
if augmentRecords:
yield self._augmentDB.addAugmentRecords(augmentRecords)
# Apply the base records
if baseRecords:
try:
yield self._directory.updateRecords(baseRecords, create=create)
except NotAllowedError:
pass
def _splitFields(self, record):
"""
Returns a tuple of two dictionaries; the first contains all the non
augment fields, and the second contains all the augment fields.
"""
if record is None:
return None
augmentFields = {}
baseFields = record.fields.copy()
for field in (
FieldName.loginAllowed,
FieldName.hasCalendars, FieldName.hasContacts,
FieldName.autoScheduleMode, FieldName.autoAcceptGroup,
FieldName.serviceNodeUID
):
if field in baseFields:
augmentFields[field] = baseFields[field]
del baseFields[field]
return (baseFields, augmentFields)
@inlineCallbacks
def removeRecords(self, uids):
yield self._augmentDB.removeAugmentRecords(uids)
yield self._directory.removeRecords(uids)
def _assignToField(self, fields, name, value):
"""
Assign a value to a field only if not already present in fields.
"""
field = self.fieldName.lookupByName(name)
if field not in fields:
fields[field] = value
@inlineCallbacks
def _augment(self, record):
if record is None:
returnValue(None)
augmentRecord = yield self._augmentDB.getAugmentRecord(
record.uid,
self.recordTypeToOldName(record.recordType)
)
if augmentRecord is None:
# Augments does not know about this record type, so return
# the original record
returnValue(record)
fields = record.fields.copy()
if augmentRecord:
if record.recordType == RecordType.group:
self._assignToField(fields, "hasCalendars", False)
self._assignToField(fields, "hasContacts", False)
else:
self._assignToField(
fields, "hasCalendars",
augmentRecord.enabledForCalendaring
)
self._assignToField(
fields, "hasContacts",
augmentRecord.enabledForAddressBooks
)
# In the case of XML augments, a missing auto-schedule-mode
# element has the same meaning an element with a value of "default"
# in which case augmentRecord.autoScheduleMode = "default". On
# the record we're augmenting, "default" mode means autoScheduleMode
# gets set to None (distinct from AutoScheduleMode.none!),
# which gets swapped for config.Scheduling.Options.AutoSchedule.DefaultMode
# in checkAttendeeAutoReply().
# ...Except for locations/resources which will default to automatic
autoScheduleMode = {
"none": AutoScheduleMode.none,
"accept-always": AutoScheduleMode.accept,
"decline-always": AutoScheduleMode.decline,
"accept-if-free": AutoScheduleMode.acceptIfFree,
"decline-if-busy": AutoScheduleMode.declineIfBusy,
"automatic": AutoScheduleMode.acceptIfFreeDeclineIfBusy,
}.get(augmentRecord.autoScheduleMode, None)
# Resources/Locations default to automatic
if record.recordType in (
CalRecordType.location,
CalRecordType.resource
):
if autoScheduleMode is None:
autoScheduleMode = AutoScheduleMode.acceptIfFreeDeclineIfBusy
self._assignToField(
fields, "autoScheduleMode",
autoScheduleMode
)
if augmentRecord.autoAcceptGroup is not None:
self._assignToField(
fields, "autoAcceptGroup",
augmentRecord.autoAcceptGroup.decode("utf-8")
)
self._assignToField(
fields, "loginAllowed",
augmentRecord.enabledForLogin
)
self._assignToField(
fields, "serviceNodeUID",
augmentRecord.serverID.decode("utf-8")
)
else:
self._assignToField(fields, "hasCalendars", False)
self._assignToField(fields, "hasContacts", False)
self._assignToField(fields, "loginAllowed", False)
# print("Augmented fields", fields)
# Clone to a new record with the augmented fields
augmentedRecord = AugmentedDirectoryRecord(self, record, fields)
returnValue(augmentedRecord)
@inlineCallbacks
def setAutoScheduleMode(self, record, autoScheduleMode):
augmentRecord = yield self._augmentDB.getAugmentRecord(
record.uid,
self.recordTypeToOldName(record.recordType)
)
if augmentRecord is not None:
autoScheduleMode = {
AutoScheduleMode.none: "none",
AutoScheduleMode.accept: "accept-always",
AutoScheduleMode.decline: "decline-always",
AutoScheduleMode.acceptIfFree: "accept-if-free",
AutoScheduleMode.declineIfBusy: "decline-if-busy",
AutoScheduleMode.acceptIfFreeDeclineIfBusy: "automatic",
}.get(autoScheduleMode)
augmentRecord.autoScheduleMode = autoScheduleMode
yield self._augmentDB.addAugmentRecords([augmentRecord])
class AugmentedDirectoryRecord(DirectoryRecord, CalendarDirectoryRecordMixin):
"""
Augmented directory record.
"""
def __init__(self, service, baseRecord, augmentedFields):
DirectoryRecord.__init__(self, service, augmentedFields)
CalendarDirectoryRecordMixin.__init__(self)
self._baseRecord = baseRecord
@timed
@inlineCallbacks
def members(self):
augmented = []
records = yield self._baseRecord.members()
for record in records:
augmented.append((yield self.service._augment(record)))
returnValue(augmented)
def addMembers(self, memberRecords):
return self._baseRecord.addMembers(memberRecords)
def removeMembers(self, memberRecords):
return self._baseRecord.removeMembers(memberRecords)
def setMembers(self, memberRecords):
return self._baseRecord.setMembers(memberRecords)
@timed
@inlineCallbacks
def groups(self):
augmented = []
def _groupUIDsFor(txn):
return txn.groupUIDsFor(self.uid)
groupUIDs = yield self.service._store.inTransaction(
"AugmentedDirectoryRecord.groups",
_groupUIDsFor
)
for groupUID in groupUIDs:
groupRecord = yield self.service.recordWithUID(
groupUID
)
if groupRecord:
augmented.append((yield self.service._augment(groupRecord)))
returnValue(augmented)
@timed
def verifyPlaintextPassword(self, password):
return self._baseRecord.verifyPlaintextPassword(password)
@timed
def verifyHTTPDigest(self, *args):
return self._baseRecord.verifyHTTPDigest(*args)
@timed
def accessForRecord(self, record):
return self._baseRecord.accessForRecord(record)
| macosforge/ccs-calendarserver | txdav/who/augment.py | Python | apache-2.0 | 18,083 | 0.000608 |
#
#
#
from BCDataStream import *
from enumeration import Enumeration
from base58 import public_key_to_bc_address, hash_160_to_bc_address
import logging
import socket
import struct
import time
from util import short_hex, long_hex
def parse_CAddress(vds):
d = {}
d['nVersion'] = vds.read_int32()
d['nTime'] = vds.read_uint32()
d['nServices'] = vds.read_uint64()
d['pchReserved'] = vds.read_bytes(12)
d['ip'] = socket.inet_ntoa(vds.read_bytes(4))
d['port'] = socket.htons(vds.read_uint16())
return d
def deserialize_CAddress(d):
return d['ip']+":"+str(d['port'])+" (lastseen: %s)"%(time.ctime(d['nTime']),)
def parse_setting(setting, vds):
if setting[0] == "f": # flag (boolean) settings
return str(vds.read_boolean())
elif setting == "addrIncoming":
return "" # bitcoin 0.4 purposely breaks addrIncoming setting in encrypted wallets.
elif setting[0:4] == "addr": # CAddress
d = parse_CAddress(vds)
return deserialize_CAddress(d)
elif setting == "nTransactionFee":
return vds.read_int64()
elif setting == "nLimitProcessors":
return vds.read_int32()
return 'unknown setting'
def parse_TxIn(vds):
d = {}
d['prevout_hash'] = vds.read_bytes(32)
d['prevout_n'] = vds.read_uint32()
d['scriptSig'] = vds.read_bytes(vds.read_compact_size())
d['sequence'] = vds.read_uint32()
return d
def deserialize_TxIn(d, transaction_index=None, owner_keys=None):
if d['prevout_hash'] == "\x00"*32:
result = "TxIn: COIN GENERATED"
result += " coinbase:"+d['scriptSig'].encode('hex_codec')
elif transaction_index is not None and d['prevout_hash'] in transaction_index:
p = transaction_index[d['prevout_hash']]['txOut'][d['prevout_n']]
result = "TxIn: value: %f"%(p['value']/1.0e8,)
result += " prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
else:
result = "TxIn: prev("+long_hex(d['prevout_hash'][::-1])+":"+str(d['prevout_n'])+")"
pk = extract_public_key(d['scriptSig'])
result += " pubkey: "+pk
result += " sig: "+decode_script(d['scriptSig'])
if d['sequence'] < 0xffffffff: result += " sequence: "+hex(d['sequence'])
return result
def parse_TxOut(vds):
d = {}
d['value'] = vds.read_int64()
d['scriptPubKey'] = vds.read_bytes(vds.read_compact_size())
return d
def deserialize_TxOut(d, owner_keys=None):
result = "TxOut: value: %f"%(d['value']/1.0e8,)
pk = extract_public_key(d['scriptPubKey'])
result += " pubkey: "+pk
result += " Script: "+decode_script(d['scriptPubKey'])
if owner_keys is not None:
if pk in owner_keys: result += " Own: True"
else: result += " Own: False"
return result
def parse_Transaction(vds):
d = {}
d['version'] = vds.read_int32()
n_vin = vds.read_compact_size()
d['txIn'] = []
for i in xrange(n_vin):
d['txIn'].append(parse_TxIn(vds))
n_vout = vds.read_compact_size()
d['txOut'] = []
for i in xrange(n_vout):
d['txOut'].append(parse_TxOut(vds))
d['lockTime'] = vds.read_uint32()
return d
def deserialize_Transaction(d, transaction_index=None, owner_keys=None):
result = "%d tx in, %d out\n"%(len(d['txIn']), len(d['txOut']))
for txIn in d['txIn']:
result += deserialize_TxIn(txIn, transaction_index) + "\n"
for txOut in d['txOut']:
result += deserialize_TxOut(txOut, owner_keys) + "\n"
return result
def parse_MerkleTx(vds):
d = parse_Transaction(vds)
d['hashBlock'] = vds.read_bytes(32)
n_merkleBranch = vds.read_compact_size()
d['merkleBranch'] = vds.read_bytes(32*n_merkleBranch)
d['nIndex'] = vds.read_int32()
return d
def deserialize_MerkleTx(d, transaction_index=None, owner_keys=None):
tx = deserialize_Transaction(d, transaction_index, owner_keys)
result = "block: "+(d['hashBlock'][::-1]).encode('hex_codec')
result += " %d hashes in merkle branch\n"%(len(d['merkleBranch'])/32,)
return result+tx
def parse_WalletTx(vds):
d = parse_MerkleTx(vds)
n_vtxPrev = vds.read_compact_size()
d['vtxPrev'] = []
for i in xrange(n_vtxPrev):
d['vtxPrev'].append(parse_MerkleTx(vds))
d['mapValue'] = {}
n_mapValue = vds.read_compact_size()
for i in xrange(n_mapValue):
key = vds.read_string()
value = vds.read_string()
d['mapValue'][key] = value
n_orderForm = vds.read_compact_size()
d['orderForm'] = []
for i in xrange(n_orderForm):
first = vds.read_string()
second = vds.read_string()
d['orderForm'].append( (first, second) )
d['fTimeReceivedIsTxTime'] = vds.read_uint32()
d['timeReceived'] = vds.read_uint32()
d['fromMe'] = vds.read_boolean()
d['spent'] = vds.read_boolean()
return d
def deserialize_WalletTx(d, transaction_index=None, owner_keys=None):
result = deserialize_MerkleTx(d, transaction_index, owner_keys)
result += "%d vtxPrev txns\n"%(len(d['vtxPrev']),)
result += "mapValue:"+str(d['mapValue'])
if len(d['orderForm']) > 0:
result += "\n"+" orderForm:"+str(d['orderForm'])
result += "\n"+"timeReceived:"+time.ctime(d['timeReceived'])
result += " fromMe:"+str(d['fromMe'])+" spent:"+str(d['spent'])
return result
# The CAuxPow (auxiliary proof of work) structure supports merged mining.
# A flag in the block version field indicates the structure's presence.
# As of 8/2011, the Original Bitcoin Client does not use it. CAuxPow
# originated in Namecoin; see
# https://github.com/vinced/namecoin/blob/mergedmine/doc/README_merged-mining.md.
def parse_AuxPow(vds):
d = parse_MerkleTx(vds)
n_chainMerkleBranch = vds.read_compact_size()
d['chainMerkleBranch'] = vds.read_bytes(32*n_chainMerkleBranch)
d['chainIndex'] = vds.read_int32()
d['parentBlock'] = parse_BlockHeader(vds)
return d
def parse_BlockHeader(vds):
d = {}
header_start = vds.read_cursor
d['version'] = vds.read_int32()
d['hashPrev'] = vds.read_bytes(32)
d['hashMerkleRoot'] = vds.read_bytes(32)
d['nTime'] = vds.read_uint32()
d['nBits'] = vds.read_uint32()
d['nNonce'] = vds.read_uint32()
header_end = vds.read_cursor
d['__header__'] = vds.input[header_start:header_end]
return d
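# Worked example (a sketch, not part of the original tools): Bitcoin's block
# hash is the double SHA-256 of the 80-byte serialized header, displayed in
# reverse byte order; parse_BlockHeader keeps that raw slice in '__header__'.
import hashlib

def block_header_hash(d):
    """Hex block hash for a header dict produced by parse_BlockHeader."""
    digest = hashlib.sha256(hashlib.sha256(d['__header__']).digest()).digest()
    return digest[::-1].encode('hex_codec')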
def parse_Block(vds):
d = parse_BlockHeader(vds)
d['transactions'] = []
# if d['version'] & (1 << 8):
# d['auxpow'] = parse_AuxPow(vds)
nTransactions = vds.read_compact_size()
for i in xrange(nTransactions):
d['transactions'].append(parse_Transaction(vds))
return d
def deserialize_Block(d):
result = "Time: "+time.ctime(d['nTime'])+" Nonce: "+str(d['nNonce'])
result += "\nnBits: 0x"+hex(d['nBits'])
result += "\nhashMerkleRoot: 0x"+d['hashMerkleRoot'][::-1].encode('hex_codec')
result += "\nPrevious block: "+d['hashPrev'][::-1].encode('hex_codec')
result += "\n%d transactions:\n"%len(d['transactions'])
for t in d['transactions']:
result += deserialize_Transaction(t)+"\n"
result += "\nRaw block header: "+d['__header__'].encode('hex_codec')
return result
def parse_BlockLocator(vds):
d = { 'hashes' : [] }
nHashes = vds.read_compact_size()
for i in xrange(nHashes):
d['hashes'].append(vds.read_bytes(32))
return d
def deserialize_BlockLocator(d):
result = "Block Locator top: "+d['hashes'][0][::-1].encode('hex_codec')
return result
opcodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1",76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
"OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF", "OP_VERIFY",
"OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER", "OP_2ROT", "OP_2SWAP",
"OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
"OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT", "OP_AND",
"OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD", "OP_1SUB", "OP_2MUL",
"OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV",
"OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
"OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
"OP_GREATERTHAN", "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
"OP_WITHIN", "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160",
"OP_HASH256", "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
"OP_CHECKMULTISIGVERIFY",
"OP_NOP1", "OP_NOP2", "OP_NOP3", "OP_NOP4", "OP_NOP5", "OP_NOP6", "OP_NOP7", "OP_NOP8", "OP_NOP9", "OP_NOP10",
("OP_INVALIDOPCODE", 0xFF),
])
def script_GetOp(bytes):
i = 0
while i < len(bytes):
vch = None
opcode = ord(bytes[i])
i += 1
if opcode <= opcodes.OP_PUSHDATA4:
nSize = opcode
if opcode == opcodes.OP_PUSHDATA1:
nSize = ord(bytes[i])
i += 1
elif opcode == opcodes.OP_PUSHDATA2:
(nSize,) = struct.unpack_from('<H', bytes, i)
i += 2
elif opcode == opcodes.OP_PUSHDATA4:
(nSize,) = struct.unpack_from('<I', bytes, i)
i += 4
if i+nSize > len(bytes):
vch = "_INVALID_"+bytes[i:]
i = len(bytes)
else:
vch = bytes[i:i+nSize]
i += nSize
yield (opcode, vch)
def script_GetOpName(opcode):
try:
return (opcodes.whatis(opcode)).replace("OP_", "")
except KeyError:
return "InvalidOp_"+str(opcode)
def decode_script(bytes):
result = ''
for (opcode, vch) in script_GetOp(bytes):
if len(result) > 0: result += " "
if opcode <= opcodes.OP_PUSHDATA4:
result += "%d:"%(opcode,)
result += short_hex(vch)
else:
result += script_GetOpName(opcode)
return result
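# Example (illustrative, not from the original module): for a standard
# pay-to-address scriptPubKey -- OP_DUP OP_HASH160 <20 bytes> OP_EQUALVERIFY
# OP_CHECKSIG -- decode_script returns a string of the form
# "DUP HASH160 20:89abcd... EQUALVERIFY CHECKSIG".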
def match_decoded(decoded, to_match):
if len(decoded) != len(to_match):
    return False
for i in range(len(decoded)):
if to_match[i] == opcodes.OP_PUSHDATA4 and decoded[i][0] <= opcodes.OP_PUSHDATA4:
continue # Opcodes below OP_PUSHDATA4 all just push data onto stack, and are equivalent.
if to_match[i] != decoded[i][0]:
return False
return True
def extract_public_key(bytes):
decoded = [ x for x in script_GetOp(bytes) ]
# non-generated TxIn transactions push a signature
# (seventy-something bytes) and then their public key
# (33 or 65 bytes) onto the stack:
match = [ opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4 ]
if match_decoded(decoded, match):
return public_key_to_bc_address(decoded[1][1])
# The Genesis Block, self-payments, and pay-by-IP-address payments look like:
# 65 BYTES:... CHECKSIG
match = [ opcodes.OP_PUSHDATA4, opcodes.OP_CHECKSIG ]
if match_decoded(decoded, match):
return public_key_to_bc_address(decoded[0][1])
# Pay-by-Bitcoin-address TxOuts look like:
# DUP HASH160 20 BYTES:... EQUALVERIFY CHECKSIG
match = [ opcodes.OP_DUP, opcodes.OP_HASH160, opcodes.OP_PUSHDATA4, opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG ]
if match_decoded(decoded, match):
return hash_160_to_bc_address(decoded[2][1])
# BIP11 TxOuts look like one of these:
# Note that match_decoded is dumb, so OP_1 actually matches OP_1/2/3/etc:
multisigs = [
[ opcodes.OP_1, opcodes.OP_PUSHDATA4, opcodes.OP_1, opcodes.OP_CHECKMULTISIG ],
[ opcodes.OP_2, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_2, opcodes.OP_CHECKMULTISIG ],
[ opcodes.OP_3, opcodes.OP_PUSHDATA4, opcodes.OP_PUSHDATA4, opcodes.OP_3, opcodes.OP_CHECKMULTISIG ]
]
for match in multisigs:
if match_decoded(decoded, match):
return "["+[ public_key_to_bc_address(decoded[i][1]) for i in range(1,len(decoded-1)) ]+"]"
# BIP16 TxOuts look like:
# HASH160 20 BYTES:... EQUAL
match = [ opcodes.OP_HASH160, 0x14, opcodes.OP_EQUAL ]
if match_decoded(decoded, match):
return hash_160_to_bc_address(decoded[1][1], version="\x05")
return "(None)"
| radare/bitcointools | deserialize.py | Python | mit | 11,831 | 0.021384 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Integrates the web-platform-tests test runner with mach.
from __future__ import absolute_import, unicode_literals, print_function
import os
import sys
from mozbuild.base import (
MachCommandBase,
MachCommandConditions as conditions,
MozbuildObject,
)
from mach.decorators import (
CommandProvider,
Command,
)
# This should probably be consolidated with similar classes in other test
# runners.
class InvalidTestPathError(Exception):
"""Exception raised when the test path is not valid."""
class WebPlatformTestsRunner(MozbuildObject):
"""Run web platform tests."""
def setup_kwargs(self, kwargs):
from wptrunner import wptcommandline
build_path = os.path.join(self.topobjdir, 'build')
if build_path not in sys.path:
sys.path.append(build_path)
if kwargs["config"] is None:
kwargs["config"] = os.path.join(self.topsrcdir, 'testing', 'web-platform', 'wptrunner.ini')
if kwargs["binary"] is None:
kwargs["binary"] = self.get_binary_path()
if kwargs["prefs_root"] is None:
kwargs["prefs_root"] = os.path.join(self.topobjdir, '_tests', 'web-platform', "prefs")
if kwargs["certutil_binary"] is None:
kwargs["certutil_binary"] = self.get_binary_path('certutil')
if kwargs["stackfix_dir"] is None:
kwargs["stackfix_dir"] = os.path.split(
self.get_binary_path(validate_exists=False))[0]
here = os.path.split(__file__)[0]
if kwargs["ssl_type"] in (None, "pregenerated"):
if kwargs["ca_cert_path"] is None:
kwargs["ca_cert_path"] = os.path.join(here, "certs", "cacert.pem")
if kwargs["host_key_path"] is None:
kwargs["host_key_path"] = os.path.join(here, "certs", "web-platform.test.key")
if kwargs["host_cert_path"] is None:
kwargs["host_cert_path"] = os.path.join(here, "certs", "web-platform.test.pem")
kwargs["capture_stdio"] = True
kwargs = wptcommandline.check_args(kwargs)
def run_tests(self, **kwargs):
from wptrunner import wptrunner
self.setup_kwargs(kwargs)
logger = wptrunner.setup_logging(kwargs, {"mach": sys.stdout})
result = wptrunner.run_tests(**kwargs)
return int(not result)
def list_test_groups(self, **kwargs):
from wptrunner import wptrunner
self.setup_kwargs(kwargs)
wptrunner.list_test_groups(**kwargs)
class WebPlatformTestsUpdater(MozbuildObject):
"""Update web platform tests."""
def run_update(self, **kwargs):
import update
from update import updatecommandline
if kwargs["config"] is None:
kwargs["config"] = os.path.join(self.topsrcdir, 'testing', 'web-platform', 'wptrunner.ini')
if kwargs["product"] is None:
kwargs["product"] = "firefox"
updatecommandline.check_args(kwargs)
logger = update.setup_logging(kwargs, {"mach": sys.stdout})
try:
update.run_update(logger, **kwargs)
except Exception:
import pdb
import traceback
traceback.print_exc()
# pdb.post_mortem()
class WebPlatformTestsReduce(WebPlatformTestsRunner):
def run_reduce(self, **kwargs):
from wptrunner import reduce
self.setup_kwargs(kwargs)
kwargs["capture_stdio"] = True
logger = reduce.setup_logging(kwargs, {"mach": sys.stdout})
tests = reduce.do_reduce(**kwargs)
if not tests:
logger.warning("Test was not unstable")
for item in tests:
logger.info(item.id)
class WebPlatformTestsCreator(MozbuildObject):
template_prefix = """<!doctype html>
%(documentElement)s<meta charset=utf-8>
"""
template_long_timeout = "<meta name=timeout content=long>\n"
template_body_th = """<title></title>
<script src=/resources/testharness.js></script>
<script src=/resources/testharnessreport.js></script>
<script>
</script>
"""
template_body_reftest = """<title></title>
<link rel=%(match)s href=%(ref)s>
"""
template_body_reftest_wait = """<script src="/common/reftest-wait.js"></script>
"""
def rel_path(self, path):
if path is None:
return
abs_path = os.path.normpath(os.path.abspath(path))
return os.path.relpath(abs_path, self.topsrcdir)
def rel_url(self, rel_path):
upstream_path = os.path.join("testing", "web-platform", "tests")
local_path = os.path.join("testing", "web-platform", "mozilla", "tests")
if rel_path.startswith(upstream_path):
return rel_path[len(upstream_path):].replace(os.path.sep, "/")
elif rel_path.startswith(local_path):
return "/_mozilla" + rel_path[len(local_path):].replace(os.path.sep, "/")
else:
return None
def run_create(self, context, **kwargs):
import subprocess
path = self.rel_path(kwargs["path"])
ref_path = self.rel_path(kwargs["ref"])
if kwargs["ref"]:
kwargs["reftest"] = True
if self.rel_url(path) is None:
print("""Test path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Gecko-only tests""" % path)
return 1
if ref_path and self.rel_url(ref_path) is None:
print("""Reference path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Gecko-only tests""" % ref_path)
return 1
if os.path.exists(path) and not kwargs["overwrite"]:
print("Test path already exists, pass --overwrite to replace")
return 1
if kwargs["mismatch"] and not kwargs["reftest"]:
print("--mismatch only makes sense for a reftest")
return 1
if kwargs["wait"] and not kwargs["reftest"]:
print("--wait only makes sense for a reftest")
return 1
args = {"documentElement": "<html class=reftest-wait>\n" if kwargs["wait"] else ""}
template = self.template_prefix % args
if kwargs["long_timeout"]:
template += self.template_long_timeout
if kwargs["reftest"]:
args = {"match": "match" if not kwargs["mismatch"] else "mismatch",
"ref": self.rel_url(ref_path) if kwargs["ref"] else '""'}
template += self.template_body_reftest % args
if kwargs["wait"]:
template += self.template_body_reftest_wait
else:
template += self.template_body_th
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
with open(path, "w") as f:
f.write(template)
if kwargs["no_editor"]:
editor = None
elif kwargs["editor"]:
editor = kwargs["editor"]
elif "VISUAL" in os.environ:
editor = os.environ["VISUAL"]
elif "EDITOR" in os.environ:
editor = os.environ["EDITOR"]
else:
editor = None
proc = None
if editor:
proc = subprocess.Popen("%s %s" % (editor, path), shell=True)
if not kwargs["no_run"]:
p = create_parser_wpt()
wpt_kwargs = vars(p.parse_args(["--manifest-update", path]))
context.commands.dispatch("web-platform-tests", context, **wpt_kwargs)
if proc:
proc.wait()
class WPTManifestUpdater(MozbuildObject):
def run_update(self, check_clean=False, **kwargs):
import manifestupdate
from wptrunner import wptlogging
logger = wptlogging.setup(kwargs, {"mach": sys.stdout})
wpt_dir = os.path.abspath(os.path.join(self.topsrcdir, 'testing', 'web-platform'))
manifestupdate.update(logger, wpt_dir, check_clean)
def create_parser_wpt():
from wptrunner import wptcommandline
return wptcommandline.create_parser(["firefox"])
def create_parser_update():
from update import updatecommandline
return updatecommandline.create_parser()
def create_parser_reduce():
from wptrunner import wptcommandline
return wptcommandline.create_parser_reduce()
def create_parser_create():
import argparse
p = argparse.ArgumentParser()
p.add_argument("--no-editor", action="store_true",
help="Don't try to open the test in an editor")
p.add_argument("-e", "--editor", action="store", help="Editor to use")
p.add_argument("--no-run", action="store_true",
help="Don't try to update the wpt manifest or open the test in a browser")
p.add_argument("--long-timeout", action="store_true",
help="Test should be given a long timeout (typically 60s rather than 10s, but varies depending on environment)")
p.add_argument("--overwrite", action="store_true",
help="Allow overwriting an existing test file")
p.add_argument("-r", "--reftest", action="store_true",
help="Create a reftest rather than a testharness (js) test"),
p.add_argument("-ref", "--reference", dest="ref", help="Path to the reference file")
p.add_argument("--mismatch", action="store_true",
help="Create a mismatch reftest")
p.add_argument("--wait", action="store_true",
help="Create a reftest that waits until takeScreenshot() is called")
p.add_argument("path", action="store", help="Path to the test file")
return p
def create_parser_manifest_update():
import manifestupdate
return manifestupdate.create_parser()
@CommandProvider
class MachCommands(MachCommandBase):
def setup(self):
self._activate_virtualenv()
@Command("web-platform-tests",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_wpt)
def run_web_platform_tests(self, **params):
self.setup()
if "test_objects" in params:
for item in params["test_objects"]:
params["include"].append(item["name"])
del params["test_objects"]
wpt_runner = self._spawn(WebPlatformTestsRunner)
if params["list_test_groups"]:
return wpt_runner.list_test_groups(**params)
else:
return wpt_runner.run_tests(**params)
@Command("wpt",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_wpt)
def run_wpt(self, **params):
return self.run_web_platform_tests(**params)
@Command("web-platform-tests-update",
category="testing",
parser=create_parser_update)
def update_web_platform_tests(self, **params):
self.setup()
self.virtualenv_manager.install_pip_package('html5lib==0.99')
self.virtualenv_manager.install_pip_package('requests')
wpt_updater = self._spawn(WebPlatformTestsUpdater)
return wpt_updater.run_update(**params)
@Command("wpt-update",
category="testing",
parser=create_parser_update)
def update_wpt(self, **params):
return self.update_web_platform_tests(**params)
@Command("web-platform-tests-reduce",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_reduce)
def unstable_web_platform_tests(self, **params):
self.setup()
wpt_reduce = self._spawn(WebPlatformTestsReduce)
return wpt_reduce.run_reduce(**params)
@Command("wpt-reduce",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_reduce)
def unstable_wpt(self, **params):
return self.unstable_web_platform_tests(**params)
@Command("web-platform-tests-create",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_create)
def create_web_platform_test(self, **params):
self.setup()
wpt_creator = self._spawn(WebPlatformTestsCreator)
wpt_creator.run_create(self._mach_context, **params)
@Command("wpt-create",
category="testing",
conditions=[conditions.is_firefox],
parser=create_parser_create)
def create_wpt(self, **params):
return self.create_web_platform_test(**params)
@Command("wpt-manifest-update",
category="testing",
parser=create_parser_manifest_update)
def wpt_manifest_update(self, **params):
self.setup()
wpt_manifest_updater = self._spawn(WPTManifestUpdater)
return wpt_manifest_updater.run_update(**params)
| Yukarumya/Yukarum-Redfoxes | testing/web-platform/mach_commands.py | Python | mpl-2.0 | 13,041 | 0.00207 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains Facebook Ads Reporting hooks
"""
import time
from enum import Enum
from typing import Any, Dict, List
from cached_property import cached_property
from facebook_business.adobjects.adaccount import AdAccount
from facebook_business.adobjects.adreportrun import AdReportRun
from facebook_business.adobjects.adsinsights import AdsInsights
from facebook_business.api import FacebookAdsApi
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
class JobStatus(Enum):
"""
Available options for facebook async task status
"""
COMPLETED = 'Job Completed'
STARTED = 'Job Started'
RUNNING = 'Job Running'
FAILED = 'Job Failed'
SKIPPED = 'Job Skipped'
class FacebookAdsReportingHook(BaseHook):
"""
Hook for the Facebook Ads API
.. seealso::
For more information on the Facebook Ads API, take a look at the API docs:
https://developers.facebook.com/docs/marketing-apis/
:param facebook_conn_id: Airflow Facebook Ads connection ID
:type facebook_conn_id: str
:param api_version: The version of Facebook API. Default to v6.0
:type api_version: str
"""
def __init__(
self,
facebook_conn_id: str = "facebook_default",
api_version: str = "v6.0",
) -> None:
super().__init__()
self.facebook_conn_id = facebook_conn_id
self.api_version = api_version
self.client_required_fields = ["app_id",
"app_secret",
"access_token",
"account_id"]
def _get_service(self) -> FacebookAdsApi:
""" Returns Facebook Ads Client using a service account"""
config = self.facebook_ads_config
return FacebookAdsApi.init(app_id=config["app_id"],
app_secret=config["app_secret"],
access_token=config["access_token"],
account_id=config["account_id"],
api_version=self.api_version)
@cached_property
def facebook_ads_config(self) -> Dict:
"""
Gets Facebook ads connection from meta db and sets
facebook_ads_config attribute with returned config file
"""
self.log.info("Fetching fb connection: %s", self.facebook_conn_id)
conn = self.get_connection(self.facebook_conn_id)
config = conn.extra_dejson
        missing_keys = self.client_required_fields - config.keys()
        if missing_keys:
            message = "{missing_keys} fields are missing".format(missing_keys=missing_keys)
            raise AirflowException(message)
return config
def bulk_facebook_report(
self,
params: Dict[str, Any],
fields: List[str],
sleep_time: int = 5,
) -> List[AdsInsights]:
"""
Pulls data from the Facebook Ads API
:param fields: List of fields that is obtained from Facebook. Found in AdsInsights.Field class.
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:type fields: List[str]
:param params: Parameters that determine the query for Facebook
https://developers.facebook.com/docs/marketing-api/insights/parameters/v6.0
:type fields: Dict[str, Any]
:param sleep_time: Time to sleep when async call is happening
:type sleep_time: int
:return: Facebook Ads API response, converted to Facebook Ads Row objects
:rtype: List[AdsInsights]
"""
api = self._get_service()
ad_account = AdAccount(api.get_default_account_id(), api=api)
_async = ad_account.get_insights(params=params, fields=fields, is_async=True)
while True:
request = _async.api_get()
async_status = request[AdReportRun.Field.async_status]
percent = request[AdReportRun.Field.async_percent_completion]
self.log.info("%s %s completed, async_status: %s", percent, "%", async_status)
if async_status == JobStatus.COMPLETED.value:
self.log.info("Job run completed")
break
if async_status in [JobStatus.SKIPPED.value, JobStatus.FAILED.value]:
message = "{async_status}. Please retry.".format(async_status=async_status)
raise AirflowException(message)
time.sleep(sleep_time)
report_run_id = _async.api_get()["report_run_id"]
report_object = AdReportRun(report_run_id, api=api)
insights = report_object.get_insights()
self.log.info("Extracting data from returned Facebook Ads Iterators")
return list(insights)
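# Illustrative usage sketch (not part of this module). It assumes an Airflow
# connection named "facebook_default" whose extras provide app_id, app_secret,
# access_token and account_id, and that it runs where Airflow is configured:
#
#   hook = FacebookAdsReportingHook(facebook_conn_id="facebook_default")
#   rows = hook.bulk_facebook_report(
#       params={"level": "ad", "date_preset": "yesterday"},
#       fields=[AdsInsights.Field.impressions, AdsInsights.Field.clicks],
#   )
#   for row in rows:
#       print(row[AdsInsights.Field.impressions])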
| wooga/airflow | airflow/providers/facebook/ads/hooks/ads.py | Python | apache-2.0 | 5,572 | 0.001436 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-21 15:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pages', '0006_auto_20160221_1241'),
]
operations = [
migrations.AlterField(
model_name='page',
name='image',
field=models.ImageField(default='none.jpg', upload_to='uploads/'),
),
]
| 501code/Fletcher-Street-Urban-Riding-Club | pages/migrations/0007_auto_20160221_1533.py | Python | mit | 473 | 0 |
# imports/modules
import os
import random
import json
import collections
from PIL import Image
# Convert (r, g, b) into #rrggbb color
def getRGBstring( (r, g, b) ):
s = "#"
s = s + format(r, '02x')
s = s + format(g, '02x')
s = s + format(b, '02x')
return s
def do_compute():
    # Open the image
    origImgFile = 'res/bryce.jpg'
    origImg = Image.open(origImgFile)
    # Process the image: count how often each color appears
    freq = collections.Counter()
    for pixel in origImg.convert('RGB').getdata():
        freq[getRGBstring(pixel)] += 1
    # Save the processed information
    output = { 'file': origImgFile,
               'freq': freq }
    f = open("res/freq.json",'w')
    s = json.dumps(output, indent = 4)
    f.write(s)
    f.close()
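# Minimal entry point so the demo can be run directly.
if __name__ == '__main__':
    do_compute()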
| CS205IL-sp15/workbook | demo_colorFreq_start/py/compute.py | Python | mit | 594 | 0.065657 |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import matplotlib.colors
import sys
matplotlib.rc('text', usetex=True)
fontsize = 22
font = {'family' : 'serif',
'serif' : 'Times Roman',
'size' : fontsize}
matplotlib.rc('font', **font)
output_dir = "doc/naacl2016/"
# load in data
data_fname = sys.argv[1]
labels = np.unique(np.loadtxt(data_fname, usecols=[2], dtype='str'))
print labels
data = np.loadtxt(data_fname, converters = {2: lambda y: np.where(labels==y)[0]})
labels = ["LSTM", "USchema"]
colors = ['0.25', '0.6']
width = 4
print data
recall_idx = 0
precision_idx = 1
model_idx = 2
# initialize figures
fig1 = plt.figure()
ax1 = fig1.add_subplot(111)
ax1.set_title("LSTM + USchema: Recall vs. Precision", fontsize=fontsize)
ax1.set_xlabel("Recall")
ax1.set_ylabel("Precision")
plt.xlim((0.075, 0.5,))
plt.ylim((0.075, 0.7,))
plt.yticks((0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7))
plt.xticks((0.1, 0.2, 0.3, 0.4, 0.5))
for i in range(len(labels)):
indices = np.where(data[:,model_idx] == i)
ax1.plot(data[indices,recall_idx][0], data[indices,precision_idx][0], label=labels[i], color=colors[i], lw=width)
ax1.yaxis.set_major_formatter(ticker.FuncFormatter(lambda y, pos: ('%.1f')%(y)))
ax1.xaxis.set_major_formatter(ticker.FuncFormatter(lambda x, pos: ('%.1f')%(x)))
# add legend
ax1.legend(fontsize=18)
plt.tight_layout()
fig1.savefig("%s/pr-curve.pdf" % (output_dir), bbox_inches='tight')
plt.show()
| patverga/torch-relation-extraction | bin/analysis/plot-pr-curve.py | Python | mit | 1,479 | 0.01217 |
import numpy as n
import scipy.interpolate
import scipy.ndimage
def congrid(a, newdims, method='linear', centre=False, minusone=False):
'''Arbitrary resampling of source array to new dimension sizes.
Currently only supports maintaining the same number of dimensions.
To use 1-D arrays, first promote them to shape (x,1).
Uses the same parameters and creates the same co-ordinate lookup points
    as IDL's congrid routine, which apparently originally came from a VAX/VMS
routine of the same name.
method:
neighbour - closest value from original data
nearest and linear - uses n x 1-D interpolations using
scipy.interpolate.interp1d
(see Numerical Recipes for validity of use of n 1-D interpolations)
spline - uses ndimage.map_coordinates
centre:
True - interpolation points are at the centres of the bins
False - points are at the front edge of the bin
minusone:
For example- inarray.shape = (i,j) & new dimensions = (x,y)
False - inarray is resampled by factors of (i/x) * (j/y)
    True - inarray is resampled by (i-1)/(x-1) * (j-1)/(y-1)
This prevents extrapolation one element beyond bounds of input array.
'''
    if a.dtype not in [n.float64, n.float32]:
a = n.cast[float](a)
m1 = n.cast[int](minusone)
ofs = n.cast[int](centre) * 0.5
old = n.array( a.shape )
ndims = len( a.shape )
if len( newdims ) != ndims:
print "[congrid] dimensions error. " \
"This routine currently only support " \
"rebinning to the same number of dimensions."
return None
newdims = n.asarray( newdims, dtype=float )
dimlist = []
if method == 'neighbour':
for i in range( ndims ):
base = n.indices(newdims)[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = n.array( dimlist ).round().astype(int)
newa = a[list( cd )]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range( ndims ):
base = n.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
# specify old dims
olddims = [n.arange(i, dtype = n.float) for i in list( a.shape )]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d( olddims[-1], a, kind=method )
newa = mint( dimlist[-1] )
trorder = [ndims - 1] + range( ndims - 1 )
for i in range( ndims - 2, -1, -1 ):
newa = newa.transpose( trorder )
mint = scipy.interpolate.interp1d( olddims[i], newa,
kind=method )
newa = mint( dimlist[i] )
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose( trorder )
return newa
elif method in ['spline']:
oslices = [ slice(0,j) for j in old ]
oldcoords = n.ogrid[oslices]
nslices = [ slice(0,j) for j in list(newdims) ]
newcoords = n.mgrid[nslices]
        newcoords_dims = range(newcoords.ndim)
        # make first index last
newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
deltas = (n.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print "Congrid error: Unrecognized interpolation type.\n", \
"Currently only \'neighbour\', \'nearest\',\'linear\',", \
"and \'spline\' are supported."
return None
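# Usage sketch (illustrative; assumes this module's Python 2 / old NumPy era):
# upsample a 4x4 ramp to 8x8 with each supported method.
if __name__ == '__main__':
    a = n.arange(16, dtype=n.float64).reshape(4, 4)
    for m in ['neighbour', 'nearest', 'linear', 'spline']:
        print m, congrid(a, (8, 8), method=m).shape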
| martindurant/misc | congrid.py | Python | mit | 3,851 | 0.014801 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ReloadDocument
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow
# [START dialogflow_generated_dialogflow_v2_Documents_ReloadDocument_async]
from google.cloud import dialogflow_v2
async def sample_reload_document():
# Create a client
client = dialogflow_v2.DocumentsAsyncClient()
# Initialize request argument(s)
request = dialogflow_v2.ReloadDocumentRequest(
content_uri="content_uri_value",
name="name_value",
)
    # Make the request (the async client's method is a coroutine, so await it)
    operation = await client.reload_document(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END dialogflow_generated_dialogflow_v2_Documents_ReloadDocument_async]
| googleapis/python-dialogflow | samples/generated_samples/dialogflow_generated_dialogflow_v2_documents_reload_document_async.py | Python | apache-2.0 | 1,621 | 0.000617 |
from kona.linalg.matrices.hessian.basic import BaseHessian
class ReducedSchurPreconditioner(BaseHessian):
"""
An IDF-Schur preconditioner designed to precondition the KKT system for
multidisciplinary design optimization problems formulated using the IDF
architecture.
The preconditioner solves a system defined by the matrix:
.. math::
        \\begin{bmatrix} I & A^T \\\\ A & 0 \\end{bmatrix}
This solution is used as the preconditioner to the complete KKT system.
Unlike the complete KKT system, this solution can be performed using FGMRES.
Attributes
----------
krylov : KrylovSolver
cnstr_jac : TotalConstraintJacobian
"""
def __init__(self, vector_factories, optns=None):
super(ReducedSchurPreconditioner, self).__init__(
vector_factories, optns)
self.primal_factory.request_num_vectors(3)
if self.eq_factory is not None:
self.eq_factory.request_num_vectors(1)
else:
raise RuntimeError(
"ReducedSchurPreconditioner >> " +
"Problem must have equality constraints!")
if self.ineq_factory is not None:
self.ineq_factory.request_num_vectors(1)
# initialize the internal FGMRES solver
krylov_opts = {
'subspace_size' : 5,
'rel_tol' : 1e-2,
'check_res' : False,
'check_LS_grad' : False,
'krylov_file' : KonaFile(
'kona_schur.dat', self.primal_factory._memory.rank)}
self.krylov = FGMRES(self.primal_factory, optns=krylov_opts)
# initialize an identity preconditioner
self.eye = IdentityMatrix()
self.precond = self.eye.product
# initialize the total constraint jacobian block
self.cnstr_jac = TotalConstraintJacobian(vector_factories)
# set misc settings
self.diag = 0.0
self._allocated = False
def prod_target(self, in_vec, out_vec):
self.design_prod.equals(in_vec)
self.design_prod.restrict_to_target()
self.cnstr_jac.approx.product(self.design_prod, self.dual_prod)
out_vec.equals(0.0)
self.dual_prod.convert_to_design(out_vec)
def prod_target_t(self, in_vec, out_vec):
self.dual_prod.equals(0.0)
in_vec.convert_to_dual(self.dual_prod)
self.cnstr_jac.T.approx.product(self.dual_prod, out_vec)
out_vec.restrict_to_target()
def linearize(self, at_primal, at_state, scale=1.0):
# store references to the evaluation point
if isinstance(at_primal, CompositePrimalVector):
self.at_design = at_primal.design
else:
self.at_design = at_primal
self.at_state = at_state
# save the scaling on constraint terms
self.scale = scale
# linearize the constraint jacobian
self.cnstr_jac.linearize(self.at_design, self.at_state, scale=self.scale)
# if this is the first linearization, allocate some useful vectors
if not self._allocated:
# design vectors
self.design_prod = self.primal_factory.generate()
self.design_work = []
for i in xrange(2):
self.design_work.append(self.primal_factory.generate())
# dual vectors
self.dual_prod = None
if self.eq_factory is not None and self.ineq_factory is not None:
self.dual_prod = CompositeDualVector(
self.eq_factory.generate(), self.ineq_factory.generate())
else:
self.dual_prod = self.eq_factory.generate()
def product(self, in_vec, out_vec):
# do some aliasing
try:
in_design = in_vec.primal.design
out_design = out_vec.primal.design
out_vec.primal.slack.equals(in_vec.primal.slack)
except Exception:
in_design = in_vec.primal
out_design = out_vec.primal
in_dual = in_vec.dual
out_dual = out_vec.dual
design_work = self.design_work
out_design.equals(0.0)
out_dual.equals(0.0)
# Step 1: Solve A_targ^T * v_dual = u_targ
design_work[1].equals(in_design)
design_work[1].restrict_to_target()
design_work[0].equals(0.0)
self.prod_target_t(design_work[1], design_work[0])
self.krylov.solve(
self.prod_target_t, design_work[1], design_work[0], self.precond)
design_work[0].convert_to_dual(out_dual)
# Step 2: Compute v_x = u_x - A_x^T * v_dual
design_work[0].equals(0.0)
self.cnstr_jac.T.approx.product(out_dual, design_work[0])
out_design.equals_ax_p_by(1., in_design, -1., design_work[0])
out_design.restrict_to_design()
# Step 3: Solve A_targ * v_targ = u_dual - A_x * v_x
self.dual_prod.equals(0.0)
self.cnstr_jac.approx.product(out_design, self.dual_prod)
self.dual_prod.equals_ax_p_by(1., in_dual, -1., self.dual_prod)
self.dual_prod.convert_to_design(design_work[1])
design_work[1].restrict_to_target()
design_work[0].equals(0.0)
self.krylov.solve(
self.prod_target, design_work[1], design_work[0], self.precond)
design_work[0].restrict_to_target()
out_design.plus(design_work[0])
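        # Editorial note: steps 1-3 above are the block elimination of the
        # saddle-point system [I, A^T; A, 0][v_x; v_dual] = [u_x; u_dual]
        # restricted to the target-state subspace: solve A_targ^T v_dual =
        # u_targ for the multipliers, recover v_x = u_x - A^T v_dual, then
        # close the system with A_targ v_targ = u_dual - A_x v_x.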
# imports here to prevent circular errors
import numpy as np
from kona.linalg.vectors.composite import CompositePrimalVector
from kona.linalg.vectors.composite import CompositeDualVector
from kona.linalg.matrices.common import IdentityMatrix
from kona.linalg.matrices.hessian import TotalConstraintJacobian
from kona.linalg.solvers.krylov import FGMRES
from kona.linalg.memory import KonaFile | OptimalDesignLab/Kona | src/kona/linalg/matrices/preconds/idf_schur.py | Python | lgpl-3.0 | 5,770 | 0.00312 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('artwork', '0011_auto_20160217_1921'),
]
operations = [
migrations.AlterModelOptions(
name='artwork',
options={'ordering': ('name', 'id')},
),
]
| rogerhil/flaviabernardes | flaviabernardes/flaviabernardes/artwork/migrations/0012_auto_20160831_2148.py | Python | apache-2.0 | 376 | 0 |
# Copyright (c) 2015 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbican.tests import utils
from functionaltests.api import base
from functionaltests.api.v1.behaviors import acl_behaviors
from functionaltests.api.v1.behaviors import container_behaviors
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import acl_models
from functionaltests.api.v1.models import container_models
from functionaltests.api.v1.models import secret_models
from functionaltests.common import config
CONF = config.get_config()
admin_a = CONF.rbac_users.admin_a
creator_a = CONF.rbac_users.creator_a
observer_a = CONF.rbac_users.observer_a
auditor_a = CONF.rbac_users.auditor_a
admin_b = CONF.rbac_users.admin_b
observer_b = CONF.rbac_users.observer_b
def get_acl_default():
return {'read': {'project-access': True}}
def get_acl_one():
return {'read': {'users': ['reader1'], 'project-access': False}}
def get_acl_two():
return {'read': {'users': ['reader2'], 'project-access': False}}
test_data_set_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_get_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 200},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_update_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_delete_secret_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_set_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_get_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 200},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_update_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
test_data_delete_container_acl = {
'with_admin_a': {'user': admin_a, 'expected_return': 200},
'with_creator_a': {'user': creator_a, 'expected_return': 200},
'with_observer_a': {'user': observer_a, 'expected_return': 403},
'with_auditor_a': {'user': auditor_a, 'expected_return': 403},
'with_admin_b': {'user': admin_b, 'expected_return': 403},
'with_observer_b': {'user': observer_b, 'expected_return': 403},
}
@utils.parameterized_test_case
class RBACAclsTestCase(base.TestCase):
"""Functional tests exercising RBAC Policies for ACL Operations"""
def setUp(self):
super(RBACAclsTestCase, self).setUp()
self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)
self.container_behaviors = container_behaviors.ContainerBehaviors(
self.client)
self.acl_behaviors = acl_behaviors.AclBehaviors(self.client)
def tearDown(self):
self.acl_behaviors.delete_all_created_acls()
self.secret_behaviors.delete_all_created_secrets()
self.container_behaviors.delete_all_created_containers()
super(RBACAclsTestCase, self).tearDown()
@utils.parameterized_dataset(test_data_set_secret_acl)
def test_set_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one(),
user_name=user)
self.assertEqual(expected_return, status)
@utils.parameterized_dataset(test_data_get_secret_acl)
def test_get_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.get_acl(secret_ref + '/acl', user_name=user)
self.assertEqual(expected_return, resp.status_code)
if expected_return == 200:
self.assertIn('reader1', resp.model.read['users'])
else:
self.assertIsNone(resp.model)
@utils.parameterized_dataset(test_data_update_secret_acl)
def test_update_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
status, model = self.update_secret_acl(secret_ref,
get_acl_two(),
user_name=user)
self.assertEqual(expected_return, status)
get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl',
user_name=admin_a)
if expected_return == 200:
self.assertIsNotNone(model.acl_ref)
# verify update happened
self.assertIn('reader2', get_resp.model.read['users'])
else:
self.assertIsNone(model)
# verify no update happened
self.assertIn('reader1', get_resp.model.read['users'])
@utils.parameterized_dataset(test_data_delete_secret_acl)
def test_delete_secret_acl(self, user, expected_return):
secret_ref = self.store_secret()
status = self.set_secret_acl(secret_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.delete_acl(secret_ref + '/acl',
user_name=user)
self.assertEqual(expected_return, resp.status_code)
get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl',
user_name=admin_a)
if expected_return == 200:
# verify delete happened (return to default ACL)
self.assertTrue(get_resp.model.read['project-access'])
else:
# verify no delete happened
self.assertIn('reader1', get_resp.model.read['users'])
@utils.parameterized_dataset(test_data_set_container_acl)
def test_set_container_acl(self, user, expected_return):
container_ref = self.store_container()
status = self.set_container_acl(container_ref, get_acl_one(),
user_name=user)
self.assertEqual(expected_return, status)
@utils.parameterized_dataset(test_data_get_container_acl)
def test_get_container_acl(self, user, expected_return):
container_ref = self.store_container()
status = self.set_container_acl(container_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.get_acl(container_ref + '/acl',
user_name=user)
self.assertEqual(expected_return, resp.status_code)
if expected_return == 200:
self.assertIn('reader1', resp.model.read['users'])
else:
self.assertIsNone(resp.model)
@utils.parameterized_dataset(test_data_update_container_acl)
def test_update_container_acl(self, user, expected_return):
container_ref = self.store_container()
status = self.set_container_acl(container_ref, get_acl_one())
self.assertEqual(200, status)
status, model = self.update_container_acl(container_ref,
get_acl_two(),
user_name=user)
self.assertEqual(expected_return, status)
get_resp = self.acl_behaviors.get_acl(container_ref + '/acl',
user_name=admin_a)
if expected_return == 200:
self.assertIsNotNone(model.acl_ref)
# verify update happened
self.assertIn('reader2', get_resp.model.read['users'])
else:
self.assertIsNone(model)
# verify no update happened
self.assertIn('reader1', get_resp.model.read['users'])
@utils.parameterized_dataset(test_data_delete_container_acl)
def test_delete_container_acl(self, user, expected_return):
container_ref = self.store_container()
status = self.set_container_acl(container_ref, get_acl_one())
self.assertEqual(200, status)
resp = self.acl_behaviors.delete_acl(container_ref + '/acl',
user_name=user)
self.assertEqual(expected_return, resp.status_code)
get_resp = self.acl_behaviors.get_acl(container_ref + '/acl',
user_name=admin_a)
if expected_return == 200:
# verify delete happened (return to default ACL)
self.assertTrue(get_resp.model.read['project-access'])
else:
# verify no delete happened
self.assertIn('reader1', get_resp.model.read['users'])
# ----------------------- Helper Functions ---------------------------
def store_secret(self, user_name=creator_a, admin=admin_a):
test_model = secret_models.SecretModel(
**get_default_secret_data())
resp, secret_ref = self.secret_behaviors.create_secret(
test_model, user_name=user_name, admin=admin)
self.assertEqual(201, resp.status_code)
return secret_ref
def set_secret_acl(self, secret_ref, acl, user_name=creator_a):
test_model = acl_models.AclModel(**acl)
resp = self.acl_behaviors.create_acl(
secret_ref, test_model, user_name=user_name)
return resp.status_code
def update_secret_acl(self, secret_ref, acl, user_name=creator_a):
test_model = acl_models.AclModel(**acl)
resp = self.acl_behaviors.update_acl(
secret_ref + '/acl', test_model, user_name=user_name)
return resp.status_code, resp.model
def store_container(self, user_name=creator_a, admin=admin_a):
secret_ref = self.store_secret(user_name=user_name, admin=admin)
test_model = container_models.ContainerModel(
**get_container_req(secret_ref))
resp, container_ref = self.container_behaviors.create_container(
test_model, user_name=user_name, admin=admin)
self.assertEqual(201, resp.status_code)
return container_ref
def set_container_acl(self, container_ref, acl, user_name=creator_a):
test_model = acl_models.AclModel(**acl)
resp = self.acl_behaviors.create_acl(
container_ref, test_model, user_name=user_name)
return resp.status_code
def update_container_acl(self, container_ref, acl, user_name=creator_a):
test_model = acl_models.AclModel(**acl)
resp = self.acl_behaviors.update_acl(
container_ref + '/acl', test_model, user_name=user_name)
return resp.status_code, resp.model
# ----------------------- Support Functions ---------------------------
def get_default_secret_data():
return {
"name": "AES key",
"expiration": "2050-02-28T19:14:44.180394",
"algorithm": "aes",
"bit_length": 256,
"mode": "cbc",
"payload": get_default_payload(),
"payload_content_type": "application/octet-stream",
"payload_content_encoding": "base64",
}
def get_default_payload():
return 'Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09'
def get_container_req(secret_ref):
return {"name": "testcontainer",
"type": "generic",
"secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]}
| openstack/barbican | functionaltests/api/v1/functional/test_acls_rbac.py | Python | apache-2.0 | 13,953 | 0 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
Provides sx typing classes.
"""
from logging import getLogger
from suds import *
from suds.mx import *
from suds.sax import Namespace as NS
from suds.sax.text import Text
log = getLogger(__name__)
class Typer:
"""
Provides XML node typing as either automatic or manual.
@cvar types: A dict of class to xs type mapping.
@type types: dict
"""
    types = {
        # Python 3 merged int/long and str/unicode, so the original py2
        # mappings collapse to a single entry per merged type.
        int : ('int', NS.xsdns),
        float : ('float', NS.xsdns),
        str : ('string', NS.xsdns),
        Text : ('string', NS.xsdns),
        bool : ('boolean', NS.xsdns),
    }
@classmethod
def auto(cls, node, value=None):
"""
Automatically set the node's xsi:type attribute based on either I{value}'s
class or the class of the node's text. When I{value} is an unmapped class,
        the default type (xs:string) is set.
@param node: An XML node
@type node: L{sax.element.Element}
@param value: An object that is or would be the node's text.
@type value: I{any}
@return: The specified node.
@rtype: L{sax.element.Element}
"""
if value is None:
value = node.getText()
if isinstance(value, Object):
known = cls.known(value)
if known.name is None:
return node
tm = (known.name, known.namespace())
else:
tm = cls.types.get(value.__class__, cls.types.get(str))
cls.manual(node, *tm)
return node
@classmethod
def manual(cls, node, tval, ns=None):
"""
        Set the node's xsi:type attribute to the specified schema type,
        then add the referenced prefix(es) to the node's prefix mapping.
@param node: An XML node
@type node: L{sax.element.Element}
@param tval: The name of the schema type.
@type tval: str
@param ns: The XML namespace of I{tval}.
@type ns: (prefix, uri)
@return: The specified node.
@rtype: L{sax.element.Element}
"""
xta = ':'.join((NS.xsins[0], 'type'))
node.addPrefix(NS.xsins[0], NS.xsins[1])
if ns is None:
node.set(xta, tval)
else:
ns = cls.genprefix(node, ns)
qname = ':'.join((ns[0], tval))
node.set(xta, qname)
node.addPrefix(ns[0], ns[1])
return node
@classmethod
def genprefix(cls, node, ns):
"""
Generate a prefix.
@param node: An XML node on which the prefix will be used.
@type node: L{sax.element.Element}
@param ns: A namespace needing an unique prefix.
@type ns: (prefix, uri)
@return: The I{ns} with a new prefix.
"""
for n in range(1, 1024):
p = 'ns%d' % n
u = node.resolvePrefix(p, default=None)
if u is None or u == ns[1]:
return (p, ns[1])
raise Exception('auto prefix, exhausted')
@classmethod
def known(cls, object):
try:
md = object.__metadata__
known = md.sxtype
return known
except:
pass
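# --- Illustrative usage (editorial addition, not part of suds) ---
# Sketch of typing a SAX element; Element comes from suds.sax.element, and the
# rendered prefix ('ns1') is an assumption based on genprefix's numbering.
#
#   from suds.sax.element import Element
#   node = Element('price')
#   node.setText('9.99')
#   Typer.manual(node, 'float', NS.xsdns)  # sets xsi:type="ns1:float"
#   Typer.auto(node, 1.25)                 # infers the xsd type from a float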
| obsoleter/suds | suds/mx/typer.py | Python | lgpl-3.0 | 4,234 | 0.003779 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(name='my_project',
version='0.1.0',
packages=['my_project'],
entry_points={
'console_scripts': [
'my_project = crawler.__main__:main'
]
},
install_requires='requests'
)
| Huai-Xv/CSU_FreeClassroom | setup.py | Python | gpl-3.0 | 322 | 0 |
from distutils.dir_util import copy_tree, remove_tree
import os
import shutil
def _copy_function(source, destination):
print('Bootstrapping project at %s' % destination)
copy_tree(source, destination)
def create_app():
cwd = os.getcwd()
game_logic_path = os.path.join(cwd, 'game_logic')
game_app_interface = os.path.join(cwd, 'game_app.py')
app_template = os.path.join(cwd, 'engine', 'app_template')
_game_logic_path_exists = os.path.exists(game_logic_path)
_game_app_interface_exists = os.path.exists(game_app_interface)
if _game_logic_path_exists or _game_app_interface_exists:
answer = input(
'game_app.py or game_logic module already exists. Continue? (y/n). ' +
'\nWARNING: This will remove all contents of game_logic module, use at your own risk:'.upper()
)
if answer == 'y':
if _game_app_interface_exists:
os.remove(game_app_interface)
if _game_logic_path_exists:
remove_tree(game_logic_path)
_copy_function(app_template, cwd)
else:
_copy_function(app_template, cwd)
if not os.path.exists('settings.yaml'):
shutil.copy2('settings.yaml.template', 'settings.yaml')
if not os.path.exists('logging.yaml'):
shutil.copy2('logging.yaml.template', 'logging.yaml')
if __name__ == '__main__':
create_app()
| kollad/turbo-ninja | tools/bootstrap.py | Python | mit | 1,403 | 0.001426 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2014 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
import json
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from wirecloud.commons.baseviews import Resource
from wirecloud.commons.utils.http import get_absolute_reverse_url
from wirecloud.fiware.marketAdaptor.marketadaptor import MarketAdaptor
from wirecloud.platform.models import Market, MarketUserData
market_adaptors = {}
def get_market_adaptor(market_user, market):
if market_user is None or market_user == 'public':
market_user = None
username = ''
else:
username = market_user
if market_user not in market_adaptors:
market_adaptors[username] = {}
if market not in market_adaptors[username]:
m = get_object_or_404(Market, user__username=market_user, name=market)
options = json.loads(m.options)
market_adaptors[username][market] = MarketAdaptor(options['url'])
return market_adaptors[username][market]
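# Editorial note: the cache above is keyed first by username ('' for public
# access) and then by market name, e.g. (illustrative keys only):
#   market_adaptors = {'': {'fiware': <MarketAdaptor>},
#                      'alice': {'fiware': <MarketAdaptor>}}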
def get_market_user_data(user, market_user, market_name):
if market_user == 'public':
market_user = None
user_data = {}
for user_data_entry in MarketUserData.objects.filter(market__user__username=market_user, market__name=market_name, user=user):
try:
user_data[user_data_entry.name] = json.loads(user_data_entry.value)
except:
user_data[user_data_entry.name] = None
try:
user_data['idm_token'] = user.social_auth.filter(provider='fiware').get().tokens['access_token']
except:
pass
return user_data
class ServiceCollection(Resource):
def read(self, request, market_user, market_name, store):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
result = adaptor.get_all_services_from_store(store, **user_data)
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class ServiceSearchCollection(Resource):
def read(self, request, market_user, market_name, store='', search_string='widget'):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
result = adaptor.full_text_search(store, search_string, user_data)
except:
return HttpResponse(status=502)
        return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class ServiceEntry(Resource):
def read(self, request, market_user, market_name, store, offering_id):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
try:
offering_info = adaptor.get_offering_info(store, offering_id, user_data)[0]
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(offering_info), content_type='application/json; charset=UTF-8')
class AllStoresServiceCollection(Resource):
def read(self, request, market_user, market_name):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
result = {'resources': []}
try:
stores = adaptor.get_all_stores()
for store in stores:
store_services = adaptor.get_all_services_from_store(store['name'], **user_data)
result['resources'].extend(store_services['resources'])
except:
return HttpResponse(status=502)
return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
class StoreCollection(Resource):
def read(self, request, market_user, market_name):
adaptor = get_market_adaptor(market_user, market_name)
try:
result = adaptor.get_all_stores()
except:
return HttpResponse(status=502)
        return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
def start_purchase(request, market_user, market_name, store):
adaptor = get_market_adaptor(market_user, market_name)
user_data = get_market_user_data(request.user, market_user, market_name)
data = json.loads(request.body)
redirect_uri = get_absolute_reverse_url('wirecloud.fiware.store_redirect_uri', request)
try:
result = adaptor.start_purchase(store, data['offering_url'], redirect_uri, **user_data)
except:
return HttpResponse(status=502)
    return HttpResponse(json.dumps(result), content_type='application/json; charset=UTF-8')
| sixuanwang/SAMSaaS | wirecloud-develop/src/wirecloud/fiware/marketAdaptor/views.py | Python | gpl-2.0 | 5,496 | 0.00455 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Country'
db.create_table('locations_country', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['Country'])
# Adding model 'Region'
db.create_table('locations_region', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('country', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Country'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['Region'])
# Adding model 'City'
db.create_table('locations_city', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('region', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Region'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
))
db.send_create_signal('locations', ['City'])
def backwards(self, orm):
# Deleting model 'Country'
db.delete_table('locations_country')
# Deleting model 'Region'
db.delete_table('locations_region')
# Deleting model 'City'
db.delete_table('locations_city')
models = {
'locations.city': {
'Meta': {'object_name': 'City'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Region']"})
},
'locations.country': {
'Meta': {'object_name': 'Country'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'locations.region': {
'Meta': {'object_name': 'Region'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['locations.Country']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['locations']
| MauHernandez/cyclope | cyclope/apps/locations/migrations/0001_initial.py | Python | gpl-3.0 | 2,676 | 0.007848 |
class intSet(object):
"""An intSet is a set of integers
The value is represented by a list of ints, self.vals.
Each integer in the set occurs in self.vals exactly once."""
def __init__(self):
"""Create an empty set of integers"""
self.vals = []
def __str__(self):
"""Returns a string representation of self"""
self.vals.sort()
return '{' + ','.join([str(e) for e in self.vals]) + '}'
def __len__(self):
return len(self.vals)
def intersect(self, other):
result = intSet()
for e in self.vals:
if e in other.vals:
result.insert(e)
return result
def insert(self, e):
"""Assumes e is an integer and inserts e into self"""
        if e not in self.vals:
self.vals.append(e)
def member(self, e):
"""Assumes e is an integer
Returns True if e is in self, and False otherwise"""
return e in self.vals
def remove(self, e):
"""Assumes e is an integer and removes e from self
Raises ValueError if e is not in self"""
try:
self.vals.remove(e)
except:
raise ValueError(str(e) + ' not found')
s = intSet()
print s
s.insert(3)
s.insert(4)
s.insert(9)
s.insert(5)
print s
t = intSet()
print t
t.insert(1)
t.insert(4)
t.insert(15)
t.insert(90)
print t
print t.__len__()
print len(t)
print s.intersect(t)
| pparacch/PlayingWithPython | computationAndProgrammingUsingPython/src/classesAndObjects/week6_L11_part05.py | Python | mit | 1,393 | 0.010768 |
# -*- coding: utf-8 -*-
import os
import unittest
from manolo_scraper.spiders.mincu import MincuSpider
from utils import fake_response_from_file
class TestMincuSpider(unittest.TestCase):
def setUp(self):
self.spider = MincuSpider()
def test_parse_item(self):
filename = os.path.join('data/mincu', '18-08-2015.html')
items = self.spider.parse(fake_response_from_file(filename, meta={'date': u'18/08/2015'}))
item = next(items)
self.assertEqual(item.get('full_name'), u'INGRID BARRIONUEVO ECHEGARAY')
self.assertEqual(item.get('time_start'), u'16:40')
self.assertEqual(item.get('institution'), u'mincu')
self.assertEqual(item.get('id_document'), u'DNI')
self.assertEqual(item.get('id_number'), u'10085172')
self.assertEqual(item.get('entity'), u'PARTICULAR')
self.assertEqual(item.get('reason'), u'REUNIÓN DE TRABAJO')
self.assertEqual(item.get('host_name'), u'JOIZ ELIZABETH DOBLADILLO ORTIZ')
self.assertEqual(item.get('title'), u'[SERVICIOS DE UN ASISTENTE EN COMUNICACIONES]')
self.assertEqual(item.get('office'), u'QHAPAQ ÑAN')
self.assertEqual(item.get('time_end'), u'16:53')
self.assertEqual(item.get('date'), u'2015-08-18')
number_of_items = 1 + sum(1 for x in items)
self.assertEqual(number_of_items, 15)
| aniversarioperu/django-manolo | scrapers/tests/test_mincu_spider.py | Python | bsd-3-clause | 1,379 | 0.002913 |
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django.conf.urls import url, include
urlpatterns = [
url(r'^campaign/', include('survey.urls.api.campaigns')),
url(r'^', include('survey.urls.api.matrix')), # No trailing '/'
# because of PATH_RE.
url(r'^sample/', include('survey.urls.api.sample')),
]
| djaodjin/djaodjin-survey | survey/urls/api/__init__.py | Python | bsd-2-clause | 1,664 | 0.000601 |
import unittest
import pandas as pd
from pandas_schema import Column
from pandas_schema.validation import CanConvertValidation, LeadingWhitespaceValidation, TrailingWhitespaceValidation
class SingleValidationColumn(unittest.TestCase):
"""
Test a column with one single validation
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=False)
ser = pd.Series([
'a',
'b',
'c'
])
def test_name(self):
self.assertEqual(self.col.name, self.NAME, 'A Column does not store its name correctly')
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), len(self.ser), 'A Column produces the wrong number of errors')
        for i in range(len(self.ser)):
self.assertTrue(any([r.row == i for r in results]), 'A Column does not report errors for every row')
class DoubleValidationColumn(unittest.TestCase):
"""
Test a column with two different validations
"""
NAME = 'col1'
col = Column(NAME, [TrailingWhitespaceValidation(), LeadingWhitespaceValidation()], allow_empty=False)
ser = pd.Series([
' a ',
' b ',
' c '
])
def test_outputs(self):
results = self.col.validate(self.ser)
# There should be 6 errors, 2 for each row
self.assertEqual(len(results), 2 * len(self.ser), 'A Column produces the wrong number of errors')
        for i in range(len(self.ser)):
in_row = [r for r in results if r.row == i]
self.assertEqual(len(in_row), 2, 'A Column does not report both errors for every row')
class AllowEmptyColumn(unittest.TestCase):
"""
Test a column with one single validation that allows empty columns
"""
NAME = 'col1'
col = Column(NAME, [CanConvertValidation(int)], allow_empty=True)
ser = pd.Series([
'',
])
def test_outputs(self):
results = self.col.validate(self.ser)
self.assertEqual(len(results), 0, 'allow_empty is not allowing empty columns')
| TMiguelT/PandasSchema | test/test_column.py | Python | gpl-3.0 | 2,051 | 0.003901 |
"""Executor util helpers."""
from __future__ import annotations
from concurrent.futures import ThreadPoolExecutor
import contextlib
import logging
import queue
import sys
from threading import Thread
import time
import traceback
from .thread import async_raise
_LOGGER = logging.getLogger(__name__)
MAX_LOG_ATTEMPTS = 2
_JOIN_ATTEMPTS = 10
EXECUTOR_SHUTDOWN_TIMEOUT = 10
def _log_thread_running_at_shutdown(name: str, ident: int) -> None:
"""Log the stack of a thread that was still running at shutdown."""
frames = sys._current_frames() # pylint: disable=protected-access
stack = frames.get(ident)
formatted_stack = traceback.format_stack(stack)
_LOGGER.warning(
"Thread[%s] is still running at shutdown: %s",
name,
"".join(formatted_stack).strip(),
)
def join_or_interrupt_threads(
threads: set[Thread], timeout: float, log: bool
) -> set[Thread]:
"""Attempt to join or interrupt a set of threads."""
joined = set()
timeout_per_thread = timeout / len(threads)
for thread in threads:
thread.join(timeout=timeout_per_thread)
if not thread.is_alive() or thread.ident is None:
joined.add(thread)
continue
if log:
_log_thread_running_at_shutdown(thread.name, thread.ident)
with contextlib.suppress(SystemError):
# SystemError at this stage is usually a race condition
# where the thread happens to die right before we force
# it to raise the exception
async_raise(thread.ident, SystemExit)
return joined
class InterruptibleThreadPoolExecutor(ThreadPoolExecutor):
"""A ThreadPoolExecutor instance that will not deadlock on shutdown."""
def shutdown(self, *args, **kwargs) -> None: # type: ignore
"""Shutdown backport from cpython 3.9 with interrupt support added."""
with self._shutdown_lock: # type: ignore[attr-defined]
self._shutdown = True
# Drain all work items from the queue, and then cancel their
# associated futures.
while True:
try:
work_item = self._work_queue.get_nowait()
except queue.Empty:
break
if work_item is not None:
work_item.future.cancel()
# Send a wake-up to prevent threads calling
# _work_queue.get(block=True) from permanently blocking.
self._work_queue.put(None)
# The above code is backported from python 3.9
#
# For maintainability join_threads_or_timeout is
# a separate function since it is not a backport from
# cpython itself
#
self.join_threads_or_timeout()
def join_threads_or_timeout(self) -> None:
"""Join threads or timeout."""
remaining_threads = set(self._threads) # type: ignore[attr-defined]
start_time = time.monotonic()
timeout_remaining: float = EXECUTOR_SHUTDOWN_TIMEOUT
attempt = 0
while True:
if not remaining_threads:
return
attempt += 1
remaining_threads -= join_or_interrupt_threads(
remaining_threads,
timeout_remaining / _JOIN_ATTEMPTS,
attempt <= MAX_LOG_ATTEMPTS,
)
timeout_remaining = EXECUTOR_SHUTDOWN_TIMEOUT - (
time.monotonic() - start_time
)
if timeout_remaining <= 0:
return
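# --- Illustrative usage (editorial addition; values are example-only) ---
# Sketch of the executor: shutdown() drains queued work items, then
# join_threads_or_timeout() joins the workers and interrupts any that hang
# past EXECUTOR_SHUTDOWN_TIMEOUT.
#
#   executor = InterruptibleThreadPoolExecutor(max_workers=2)
#   executor.submit(time.sleep, 0.1)
#   executor.shutdown()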
| home-assistant/home-assistant | homeassistant/util/executor.py | Python | apache-2.0 | 3,555 | 0 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^new', views.add_post_form, name='add'),
url(r'^post/(?P<slug>[\w-]+)/$', views.post_view, name='post'),
]
| CallMeMhz/megablog | src/blog/urls.py | Python | gpl-3.0 | 235 | 0 |
#!/usr/bin/env python
#coding: utf8
#get the list of the scanned election results papers ( proces verbaux )
# sudo apt-get install python-setuptools
# easy_install beautifulsoup4
import urllib2
from bs4 import BeautifulSoup
from string import maketrans
from string import whitespace
import csv
import time
import json
import os  # for the use of the wget command to download the files
source_url = "http://www.isie.tn/index.php/fr/243-proces-verbaux-de-depouillement-et-les-decisions-de-correction-y-afferentes.html"
# using urllib2 to read the remote html page
html = urllib2.urlopen(source_url).read()
#using BeautifulSoup library for pulling data out of HTML
soup = BeautifulSoup(html)
# getting all the districts represented by a directory tree
main_directory = soup.find('ul', class_="php-file-tree")
#print main_directory
districts = main_directory.find_all('li', recursive=False)
for district in districts :
district_link = district.findChild('a');
district_name = district_link.contents[0].encode('utf-8').strip().replace(' ', '_')
if not os.path.exists(district_name):
os.makedirs(district_name)
#delegation : electoral level 2
delegation_directory= district.findChild('ul',recursive=False)
    if delegation_directory is None:
        print "Error: data unavailable, Level: district, name: " + district_name
else:
delegations = delegation_directory.find_all('li', recursive=False)
#Processing delegation level
for delegation in delegations :
delegation_link = delegation.findChild('a');
delegation_name = delegation_link.contents[0].encode('utf-8').strip().replace(' ', '_')
if not os.path.exists(district_name+ "/" +delegation_name):
os.makedirs(district_name+ "/" +delegation_name)
        polling_center_directory = delegation.findChild('ul', recursive=False)
        if polling_center_directory is None:
            print "Error: data unavailable, Level: delegation, name: " + delegation_name
else:
polling_centers = polling_center_directory.find_all('li', class_='pft-directory', recursive=False)
#Processing polling center level
for polling_center in polling_centers:
polling_center_link = polling_center.findChild('a');
polling_center_name = polling_center_link.contents[0].encode('utf-8').strip().replace(' ', '_')
print polling_center_name
if not os.path.exists(district_name+ "/" +delegation_name+ "/" + polling_center_name):
os.makedirs(district_name+ "/" +delegation_name+ "/" + polling_center_name)
#find files list
                files_directory = polling_center.findChild('ul', recursive=False)
                if files_directory is None:
                    print "Error: data unavailable, Level: polling center, name: " + polling_center_name
else:
files = files_directory.find_all('li', class_='pft-file', recursive=False)
                    # pv stands for Proces Verbal, which in English election parlance means protocol (official record)
for pv in files:
pv_link = pv.findChild('a', href=True)
pv_ref = pv_link['href']
file_link = "http://isie.tn"+ pv_ref
fullurl = urllib2.quote(file_link.encode('utf-8'), safe="%/:=&?~#+!$,;'@()*[]")
download_path= district_name+ "/" +delegation_name+ "/" + polling_center_name
download_command= "wget -P " + download_path + " " + fullurl
os.system(download_command)
| radproject/protocols | script.py | Python | cc0-1.0 | 3,299 | 0.036072 |
from otm1_migrator.migration_rules.standard_otm1 import MIGRATION_RULES
from treemap.models import ITreeCodeOverride, ITreeRegion, User
UDFS = {
'plot': {
'owner_additional_id': {
'udf.name': 'Owner Additional Id'
},
'owner_additional_properties': {
'udf.name': 'Owner Additional Properties'
},
'type': {
'udf.name': 'Plot Type',
'udf.choices': ['Well/Pit', 'Median/Island', 'Tree Lawn',
'Park', 'Planter', 'Other', 'Yard',
'Natural Area']
},
'powerline_conflict_potential': {
'udf.name': 'Powerlines Overhead',
'udf.choices': ['Yes', 'No', 'Unknown']
},
'sidewalk_damage': {
'udf.name': 'Sidewalk Damage',
'udf.choices': ['Minor or No Damage', 'Raised More Than 3/4 Inch']
}
},
'tree': {
'sponsor': {'udf.name': 'Sponsor'},
'projects': {'udf.name': 'Projects'},
'canopy_condition': {
'udf.name': 'Canopy Condition',
'udf.choices': ['Full - No Gaps',
'Small Gaps (up to 25% missing)',
'Moderate Gaps (up to 50% missing)',
'Large Gaps (up to 75% missing)',
'Little or None (up to 100% missing)']
},
'condition': {
'udf.name': 'Tree Condition',
'udf.choices': ['Dead', 'Critical', 'Poor',
'Fair', 'Good',
'Very Good', 'Excellent']
}
}
}
SORT_ORDER_INDEX = {
'Bucks': 3,
'Burlington': 4,
'Camden': 5,
'Chester': 6,
'Delaware': 7,
'Gloucester': 8,
'Kent': 9,
'Mercer': 10,
'Montgomery': 11,
'New Castle': 12,
'Salem': 13,
'Sussex': 14,
}
def create_override(species_obj, species_dict):
for region in ['NoEastXXX', 'PiedmtCLT']:
override = ITreeCodeOverride(
instance_species_id=species_obj.pk,
region=ITreeRegion.objects.get(code=region),
itree_code=species_dict['fields']['itree_code'])
override.save_with_user(User.system_user())
return species_obj
MIGRATION_RULES['species']['postsave_actions'] = (MIGRATION_RULES['species']
.get('postsave_actions', [])
+ [create_override])
def mutate_boundary(boundary_obj, boundary_dict):
otm1_fields = boundary_dict.get('fields')
if ((boundary_obj.name.find('County') != -1
or boundary_obj.name == 'Philadelphia')):
boundary_obj.category = 'County'
boundary_obj.sort_order = 1
elif otm1_fields['county'] == 'Philadelphia':
boundary_obj.category = 'Philadelphia Neighborhood'
boundary_obj.sort_order = 2
else:
county = otm1_fields['county']
boundary_obj.category = county + ' Township'
boundary_obj.sort_order = SORT_ORDER_INDEX[county]
return boundary_obj
MIGRATION_RULES['boundary']['presave_actions'] = (MIGRATION_RULES['boundary']
.get('presave_actions', [])
+ [mutate_boundary])
MIGRATION_RULES['species']['missing_fields'] |= {'other'}
# these fields don't exist in the ptm fixture, so can't be specified
# as a value that gets discarded. Remove them.
MIGRATION_RULES['species']['removed_fields'] -= {'family'}
MIGRATION_RULES['tree']['removed_fields'] -= {'pests', 'url'}
# this field doesn't exist, so can no longer have a to -> from def
del MIGRATION_RULES['species']['renamed_fields']['other_part_of_name']
| johnsonc/OTM2 | opentreemap/otm1_migrator/migration_rules/philadelphia.py | Python | gpl-3.0 | 3,768 | 0.000531 |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from os import listdir, unlink
from os.path import join as path_join
from unittest import main
from uuid import uuid4
from swiftclient import client
from swift.common import direct_client
from swift.common.exceptions import ClientException
from swift.common.utils import hash_path, readconf
from swift.obj.diskfile import write_metadata, read_metadata, get_data_dir
from test.probe.common import ReplProbeTest
RETRIES = 5
def get_data_file_path(obj_dir):
files = []
# We might need to try a few times if a request hasn't yet settled. For
# instance, a PUT can return success when just 2 of 3 nodes has completed.
for attempt in xrange(RETRIES + 1):
try:
files = sorted(listdir(obj_dir), reverse=True)
break
except Exception:
if attempt < RETRIES:
time.sleep(1)
else:
raise
for filename in files:
return path_join(obj_dir, filename)
class TestObjectFailures(ReplProbeTest):
def _setup_data_file(self, container, obj, data):
client.put_container(self.url, self.token, container,
headers={'X-Storage-Policy':
self.policy.name})
client.put_object(self.url, self.token, container, obj, data)
odata = client.get_object(self.url, self.token, container, obj)[-1]
self.assertEquals(odata, data)
opart, onodes = self.object_ring.get_nodes(
self.account, container, obj)
onode = onodes[0]
node_id = (onode['port'] - 6000) / 10
device = onode['device']
hash_str = hash_path(self.account, container, obj)
obj_server_conf = readconf(self.configs['object-server'][node_id])
devices = obj_server_conf['app:object-server']['devices']
obj_dir = '%s/%s/%s/%s/%s/%s/' % (devices, device,
get_data_dir(self.policy),
opart, hash_str[-3:], hash_str)
data_file = get_data_file_path(obj_dir)
return onode, opart, data_file
def run_quarantine(self):
container = 'container-%s' % uuid4()
obj = 'object-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj,
'VERIFY')
metadata = read_metadata(data_file)
metadata['ETag'] = 'badetag'
write_metadata(data_file, metadata)
odata = direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
self.assertEquals(odata, 'VERIFY')
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_range_etag(self):
container = 'container-range-%s' % uuid4()
obj = 'object-range-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj,
'RANGE')
metadata = read_metadata(data_file)
metadata['ETag'] = 'badetag'
write_metadata(data_file, metadata)
base_headers = {'X-Backend-Storage-Policy-Index': self.policy.idx}
for header, result in [({'Range': 'bytes=0-2'}, 'RAN'),
({'Range': 'bytes=1-11'}, 'ANGE'),
({'Range': 'bytes=0-11'}, 'RANGE')]:
req_headers = base_headers.copy()
req_headers.update(header)
odata = direct_client.direct_get_object(
onode, opart, self.account, container, obj,
headers=req_headers)[-1]
self.assertEquals(odata, result)
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_get(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, conn_timeout=1,
response_timeout=1, headers={'X-Backend-Storage-Policy-Index':
self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_head(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
direct_client.direct_head_object(
onode, opart, self.account, container, obj, conn_timeout=1,
response_timeout=1, headers={'X-Backend-Storage-Policy-Index':
self.policy.idx})
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def run_quarantine_zero_byte_post(self):
container = 'container-zbyte-%s' % uuid4()
obj = 'object-zbyte-%s' % uuid4()
onode, opart, data_file = self._setup_data_file(container, obj, 'DATA')
metadata = read_metadata(data_file)
unlink(data_file)
with open(data_file, 'w') as fpointer:
write_metadata(fpointer, metadata)
try:
headers = {'X-Object-Meta-1': 'One', 'X-Object-Meta-Two': 'Two',
'X-Backend-Storage-Policy-Index': self.policy.idx}
direct_client.direct_post_object(
onode, opart, self.account,
container, obj,
headers=headers,
conn_timeout=1,
response_timeout=1)
raise Exception("Did not quarantine object")
except ClientException as err:
self.assertEquals(err.http_status, 404)
def test_runner(self):
self.run_quarantine()
self.run_quarantine_range_etag()
self.run_quarantine_zero_byte_get()
self.run_quarantine_zero_byte_head()
self.run_quarantine_zero_byte_post()
if __name__ == '__main__':
main()
| kun--hust/sccloud | test/probe/test_object_failures.py | Python | apache-2.0 | 7,804 | 0 |
# -*- coding: utf-8 -*-
"""
pyvisa.visa
~~~~~~~~~~~
Module to provide an import shortcut for the most common VISA operations.
This file is part of PyVISA.
:copyright: 2014 by PyVISA Authors, see AUTHORS for more details.
:license: MIT, see COPYING for more details.
"""
from __future__ import division, unicode_literals, print_function, absolute_import
from pyvisa import logger, __version__, log_to_screen, constants
from pyvisa.highlevel import ResourceManager
from pyvisa.errors import (Error, VisaIOError, VisaIOWarning, VisaTypeError,
UnknownHandler, OSNotSupported, InvalidBinaryFormat,
InvalidSession, LibraryError)
# This is needed to registry all resources.
from pyvisa.resources import Resource
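# Typical interactive use of this shortcut module (illustrative only; the
# resource address below is an example):
#
#   import visa
#   rm = visa.ResourceManager()
#   rm.list_resources()                      # e.g. ('GPIB0::12::INSTR',)
#   inst = rm.open_resource('GPIB0::12::INSTR')
#   print(inst.query('*IDN?'))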
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description='PyVISA command-line utilities')
parser.add_argument('--backend', '-b', dest='backend', action='store', default=None,
help='backend to be used (default: ni)')
subparsers = parser.add_subparsers(title='command', dest='command')
info_parser = subparsers.add_parser('info', help='print information to diagnose PyVISA')
console_parser = subparsers.add_parser('shell', help='start the PyVISA console')
args = parser.parse_args()
if args.command == 'info':
from pyvisa import util
util.get_debug_info()
elif args.command == 'shell':
from pyvisa import shell
shell.main('@' + args.backend if args.backend else '')
| MatthieuDartiailh/pyvisa | visa.py | Python | mit | 1,581 | 0.003163 |
from cornflake import fields
from cornflake.exceptions import ValidationError
from cornflake.sqlalchemy_orm import ModelSerializer, ReferenceField
from cornflake.validators import max_length, min_, none_if_blank, optional, required
from radar.api.serializers.common import (
MetaMixin,
PatientMixin,
SourceMixin,
StringLookupField,
)
from radar.api.serializers.validators import valid_date_for_patient
from radar.models.medications import (
CurrentMedication,
Drug,
DrugGroup,
Medication,
MEDICATION_DOSE_UNITS,
MEDICATION_ROUTES
)
class DrugGroupSerializer(ModelSerializer):
class Meta(object):
model_class = DrugGroup
exclude = ['parent_drug_group_id']
class DrugGroupField(ReferenceField):
model_class = DrugGroup
serializer_class = DrugGroupSerializer
class DrugSerializer(ModelSerializer):
drug_group = DrugGroupField()
class Meta(object):
model_class = Drug
exclude = ['drug_group_id']
class DrugField(ReferenceField):
model_class = Drug
serializer_class = DrugSerializer
class MedicationSerializer(PatientMixin, SourceMixin, MetaMixin, ModelSerializer):
from_date = fields.DateField()
to_date = fields.DateField(required=False)
drug = DrugField(required=False)
dose_quantity = fields.FloatField(required=False, validators=[min_(0)])
dose_unit = StringLookupField(MEDICATION_DOSE_UNITS, required=False)
frequency = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(1000)])
route = StringLookupField(MEDICATION_ROUTES, required=False)
drug_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
dose_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
class Meta(object):
model_class = Medication
exclude = ['drug_id']
validators = [
valid_date_for_patient('from_date'),
valid_date_for_patient('to_date'),
]
def pre_validate(self, data):
# Coded drug overrides drug free-text
if data['drug']:
data['drug_text'] = None
return data
def validate(self, data):
data = super(MedicationSerializer, self).validate(data)
# To date must be after from date
if data['to_date'] is not None and data['to_date'] < data['from_date']:
raise ValidationError({'to_date': 'Must be on or after from date.'})
# Must specify either a coded drug or a free-text drug
if data['drug'] is None and data['drug_text'] is None:
raise ValidationError({
'drug': 'Must specify a drug.',
'drug_text': 'Must specify a drug.',
})
# Coded dose quantities must have a unit
if data['dose_quantity'] is not None:
self.run_validators_on_field(data, 'dose_unit', [required()])
return data
class CurrentMedicationSerializer(PatientMixin, SourceMixin, MetaMixin, ModelSerializer):
date_recorded = fields.DateField()
drug = DrugField(required=False)
dose_quantity = fields.FloatField(required=False, validators=[min_(0)])
dose_unit = StringLookupField(MEDICATION_DOSE_UNITS, required=False)
frequency = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(1000)])
route = StringLookupField(MEDICATION_ROUTES, required=False)
drug_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
dose_text = fields.StringField(required=False, validators=[none_if_blank(), optional(), max_length(10000)])
class Meta(object):
model_class = CurrentMedication
exclude = ['drug_id']
validators = [
valid_date_for_patient('date_recorded'),
]
def pre_validate(self, data):
        # A coded drug overrides the free-text drug
if data['drug']:
data['drug_text'] = None
return data
def validate(self, data):
data = super(CurrentMedicationSerializer, self).validate(data)
# Must specify either a coded drug or a free-text drug
if data['drug'] is None and data['drug_text'] is None:
raise ValidationError({
'drug': 'Must specify a drug.',
'drug_text': 'Must specify a drug.',
})
# Coded dose quantities must have a unit
if data['dose_quantity'] is not None:
self.run_validators_on_field(data, 'dose_unit', [required()])
return data
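
# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module).  The cross-field
# rules enforced in MedicationSerializer.validate can be illustrated without
# cornflake; the standalone checker below mirrors those rules for
# documentation purposes only, and the field names are assumptions taken from
# the serializer above.
def _check_medication_rules(data):
    """Return a dict of field -> error message; empty when the data is valid."""
    errors = {}
    # To date must be on or after from date.
    if data.get('to_date') is not None and data['to_date'] < data['from_date']:
        errors['to_date'] = 'Must be on or after from date.'
    # Either a coded drug or a free-text drug must be present.
    if data.get('drug') is None and data.get('drug_text') is None:
        errors['drug'] = errors['drug_text'] = 'Must specify a drug.'
    # A coded dose quantity requires a unit.
    if data.get('dose_quantity') is not None and data.get('dose_unit') is None:
        errors['dose_unit'] = 'This field is required.'
    return errors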
| renalreg/radar | radar/api/serializers/medications.py | Python | agpl-3.0 | 4,625 | 0.002162 |
__author__ = 'j'
from tkinter import ttk
from tkinter import *
from TreeView import *
def buildFrame(root):
sub_1 = Frame(root)
sub_1.pack(side=LEFT,anchor = 'w', fill='both', expand=True)
sub_1_1 = Frame(sub_1)
sub_1_1.pack(side=TOP, anchor='n',fill='both',expand=True)
sub_1_2 = Frame(sub_1)
sub_1_2.pack(side=BOTTOM,anchor = 's',expand=False,fill='x')
sub_2 = Frame(root)
sub_2.pack(side=RIGHT, anchor='w', fill='both', expand=True)
sub_2_1 = Frame(sub_2)
sub_2_1.pack(side=LEFT, anchor='w',expand=False)
sub_2_2 = Frame(sub_2)
sub_2_2.pack(side=RIGHT,anchor='e',fill='both',expand=True)
sub_2_2_1 = Frame(sub_2_2)
sub_2_2_1.pack(side=TOP,anchor='e',fill='both',expand=True)
return sub_1, sub_2, sub_1_1, sub_1_2, sub_2_1, sub_2_2, sub_2_2_1
def buildTree(sub_1_1, sub_1_2):
treeview = ttk.Treeview(master=sub_1_1,columns=("fullpath", "type"), displaycolumns='')
treeview.grid(column=0, row=0, sticky='nsew', in_=sub_1_1)
treeview.bind('<<TreeviewOpen>>', update_tree)
vsb = Scrollbar(orient="vertical", command=treeview.yview)
hsb = Scrollbar(orient="horizontal", command=treeview.xview)
treeview.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
vsb.grid(column=1, row=0, sticky='ns', in_=sub_1_1)
hsb.grid(column=0, row=1, sticky='ew', in_=sub_1_1)
sub_1_1.grid_columnconfigure(0, weight=1)
sub_1_1.grid_rowconfigure(0, weight=1)
entry = Entry(master=sub_1_2)
entry.pack(side=LEFT,anchor="w",expand=True,fill='x')
return treeview, entry
def buildSelectedBox(sub_2_2_1):
selected = Listbox(master=sub_2_2_1,selectmode=EXTENDED)
vsb = Scrollbar(orient="vertical", command=selected.yview)
    hsb = Scrollbar(orient="horizontal", command=selected.xview)
    # hook the listbox to its scrollbars (mirrors buildTree above)
    selected.configure(yscrollcommand=vsb.set, xscrollcommand=hsb.set)
    selected.grid(column=0, row=0, sticky='nsew', in_=sub_2_2_1)
vsb.grid(column=1, row=0, sticky='ns', in_=sub_2_2_1)
hsb.grid(column=0, row=1, sticky='ew', in_=sub_2_2_1)
sub_2_2_1.grid_columnconfigure(0, weight=1)
sub_2_2_1.grid_rowconfigure(0, weight=1)
    return selected
| PalmDr/XRD-Data-Analysis-Toolkit | Beta1.3/Builder.py | Python | apache-2.0 | 2,100 | 0.012857 |
"""
Support for Unifi WAP controllers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.unifi/
"""
import logging
import urllib
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
# Unifi package doesn't list urllib3 as a requirement
REQUIREMENTS = ['urllib3', 'unifi==1.2.5']
_LOGGER = logging.getLogger(__name__)
CONF_PORT = 'port'
CONF_SITE_ID = 'site_id'
def get_scanner(hass, config):
"""Setup Unifi device_tracker."""
from unifi.controller import Controller
if not validate_config(config, {DOMAIN: [CONF_USERNAME,
CONF_PASSWORD]},
_LOGGER):
_LOGGER.error('Invalid configuration')
return False
this_config = config[DOMAIN]
host = this_config.get(CONF_HOST, 'localhost')
username = this_config.get(CONF_USERNAME)
password = this_config.get(CONF_PASSWORD)
site_id = this_config.get(CONF_SITE_ID, 'default')
try:
port = int(this_config.get(CONF_PORT, 8443))
except ValueError:
_LOGGER.error('Invalid port (must be numeric like 8443)')
return False
try:
ctrl = Controller(host, username, password, port, 'v4', site_id)
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to connect to unifi: %s', ex)
return False
return UnifiScanner(ctrl)
class UnifiScanner(object):
"""Provide device_tracker support from Unifi WAP client data."""
def __init__(self, controller):
"""Initialize the scanner."""
self._controller = controller
self._update()
def _update(self):
"""Get the clients from the device."""
try:
clients = self._controller.get_clients()
except urllib.error.HTTPError as ex:
_LOGGER.error('Failed to scan clients: %s', ex)
clients = []
self._clients = {client['mac']: client for client in clients}
def scan_devices(self):
"""Scan for devices."""
self._update()
return self._clients.keys()
def get_device_name(self, mac):
"""Return the name (if known) of the device.
If a name has been set in Unifi, then return that, else
return the hostname if it has been detected.
"""
client = self._clients.get(mac, {})
name = client.get('name') or client.get('hostname')
_LOGGER.debug('Device %s name %s', mac, name)
return name
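
# Hedged example configuration (YAML shown as comments because this is a
# Python module).  Host, credentials and port are placeholder values; the
# keys correspond to CONF_HOST, CONF_USERNAME, CONF_PASSWORD, CONF_PORT and
# CONF_SITE_ID handled in get_scanner above.
#
# device_tracker:
#   - platform: unifi
#     host: unifi.example.com      # defaults to localhost
#     username: YOUR_UNIFI_USER
#     password: YOUR_UNIFI_PASSWORD
#     port: 8443                   # must be numeric
#     site_id: default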
| deisi/home-assistant | homeassistant/components/device_tracker/unifi.py | Python | mit | 2,645 | 0 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "Logit", sigma = 0.0, exog_count = 100, ar_order = 12);
| antoinecarme/pyaf | tests/artificial/transf_Logit/trend_MovingAverage/cycle_12/ar_12/test_artificial_128_Logit_MovingAverage_12_12_100.py | Python | bsd-3-clause | 267 | 0.086142 |
"""
Django settings for becours project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'v8rea$)b8+a)1vbdbdn727zw7#hj$4czarlp)*j&ei@eh%=!9^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'cuser',
'booking',
'accounting',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'booking.middleware.CuserMiddleware',
]
ROOT_URLCONF = 'becours.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'becours/templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'becours.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': 'localhost',
'USER': 'postgres',
'PASSWORD': '',
'NAME': 'becours',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'fr-fr'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'becours', 'static'),
)
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
LOGIN_URL = 'auth:login'
USE_THOUSAND_SEPARATOR = True
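
# Hedged example of the environment-based overrides the warnings above point
# at; the variable names are illustrative and not part of this project:
#
# SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)
# DEBUG = os.environ.get('DJANGO_DEBUG', '') == '1'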
| eedf/becours | becours/settings.py | Python | mit | 3,562 | 0.001123 |
#=======================================================================
# Author: Donovan Parks
#
# Sequence histogram plot.
#
# Copyright 2011 Donovan Parks
#
# This file is part of STAMP.
#
# STAMP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# STAMP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with STAMP. If not, see <http://www.gnu.org/licenses/>.
#=======================================================================
import sys
import math
from PyQt4 import QtCore, QtGui
from stamp.plugins.samples.AbstractSamplePlotPlugin import AbstractSamplePlotPlugin, TestWindow, ConfigureDialog
from stamp.plugins.samples.plots.configGUI.seqHistogramUI import Ui_SeqHistogramDialog
class SeqHistogram(AbstractSamplePlotPlugin):
'''
Sequence histogram plot.
'''
def __init__(self, preferences, parent=None):
AbstractSamplePlotPlugin.__init__(self, preferences, parent)
self.preferences = preferences
self.name = 'Sequence histogram'
self.type = 'Exploratory'
self.settings = preferences['Settings']
self.figWidth = self.settings.value(self.name + '/width', 7.0).toDouble()[0]
self.figHeight = self.settings.value(self.name + '/height', 7.0).toDouble()[0]
self.bCustomBinWidth = self.settings.value(self.name + '/custom bin width', False).toBool()
self.binWidth = self.settings.value(self.name + '/bin width', 100.0).toDouble()[0]
self.yAxisLogScale = self.settings.value(self.name + '/log scale', False).toBool()
self.bCustomXaxis = self.settings.value(self.name + '/custom x-axis extents', False).toBool()
self.xLimitLeft = self.settings.value(self.name + '/min value', 0.0).toDouble()[0]
self.xLimitRight = self.settings.value(self.name + '/max value', 1.0).toDouble()[0]
self.legendPos = self.settings.value(self.name + '/legend position', 0).toInt()[0]
def mirrorProperties(self, plotToCopy):
self.name = plotToCopy.name
self.figWidth = plotToCopy.figWidth
self.figHeight = plotToCopy.figHeight
self.bCustomBinWidth = plotToCopy.bCustomBinWidth
self.binWidth = plotToCopy.binWidth
self.yAxisLogScale = plotToCopy.yAxisLogScale
self.bCustomXaxis = plotToCopy.bCustomXaxis
self.xLimitLeft = plotToCopy.xLimitLeft
self.xLimitRight = plotToCopy.xLimitRight
self.legendPos = plotToCopy.legendPos
def plot(self, profile, statsResults):
if len(profile.profileDict) <= 0:
self.emptyAxis()
return
# *** Colour of plot elements
axesColour = str(self.preferences['Axes colour'].name())
profile1Colour = str(self.preferences['Sample 1 colour'].name())
profile2Colour = str(self.preferences['Sample 2 colour'].name())
# *** Get sequence counts
seqs1 = profile.getSequenceCounts(0)
seqs2 = profile.getSequenceCounts(1)
# *** Set x-axis limit
self.xMin = min(min(seqs1),min(seqs2))
		if self.xLimitLeft is None:
self.xLimitLeft = self.xMin
self.xMax = max(max(seqs1),max(seqs2))
		if self.xLimitRight is None:
self.xLimitRight = self.xMax
# Set bin width
if not self.bCustomBinWidth:
self.binWidth = (self.xMax - self.xMin) / 40
# *** Set size of figure
self.fig.clear()
self.fig.set_size_inches(self.figWidth, self.figHeight)
heightBottomLabels = 0.4 # inches
widthSideLabel = 0.5 # inches
padding = 0.2 # inches
axesHist = self.fig.add_axes([widthSideLabel/self.figWidth,heightBottomLabels/self.figHeight,\
1.0-(widthSideLabel+padding)/self.figWidth,\
1.0-(heightBottomLabels+padding)/self.figHeight])
# *** Histogram plot
bins = [0]
binEnd = self.binWidth
while binEnd <= self.xMax:
bins.append(binEnd)
binEnd += self.binWidth
bins.append(binEnd)
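		# Worked example (assumed values): with xMax = 1000 and binWidth = 100
		# the while-loop yields [0, 100, ..., 1000] and the final append adds
		# 1100, so values falling exactly on xMax still land inside a bin.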
n, b, patches = axesHist.hist([seqs1, seqs2], bins=bins, log=self.yAxisLogScale)
for patch in patches[0]:
patch.set_facecolor(profile1Colour)
for patch in patches[1]:
patch.set_facecolor(profile2Colour)
if self.bCustomXaxis:
axesHist.set_xlim(self.xLimitLeft, self.xLimitRight)
axesHist.set_xlabel('Sequences')
axesHist.set_ylabel('Number of features')
# *** Prettify plot
if self.legendPos != -1:
legend = axesHist.legend([patches[0][0], patches[1][0]], (profile.sampleNames[0], profile.sampleNames[1]), loc=self.legendPos)
legend.get_frame().set_linewidth(0)
for a in axesHist.yaxis.majorTicks:
a.tick1On=True
a.tick2On=False
for a in axesHist.xaxis.majorTicks:
a.tick1On=True
a.tick2On=False
for line in axesHist.yaxis.get_ticklines():
line.set_color(axesColour)
for line in axesHist.xaxis.get_ticklines():
line.set_color(axesColour)
for loc, spine in axesHist.spines.iteritems():
if loc in ['right','top']:
spine.set_color('none')
else:
spine.set_color(axesColour)
self.updateGeometry()
self.draw()
def configure(self, profile, statsResults):
self.profile = profile
self.configDlg = ConfigureDialog(Ui_SeqHistogramDialog)
self.connect(self.configDlg.ui.chkCustomBinWidth, QtCore.SIGNAL('toggled(bool)'), self.changeCustomBinWidth)
self.connect(self.configDlg.ui.chkCustomXaxis, QtCore.SIGNAL('toggled(bool)'), self.changeCustomXaxis)
self.connect(self.configDlg.ui.btnXmin, QtCore.SIGNAL('clicked()'), self.setXaxisMin)
self.connect(self.configDlg.ui.btnXmax, QtCore.SIGNAL('clicked()'), self.setXaxisMax)
self.configDlg.ui.spinFigWidth.setValue(self.figWidth)
self.configDlg.ui.spinFigHeight.setValue(self.figHeight)
self.configDlg.ui.chkCustomBinWidth.setChecked(self.bCustomBinWidth)
self.configDlg.ui.spinBinWidth.setValue(self.binWidth)
self.configDlg.ui.chkLogScale.setChecked(self.yAxisLogScale)
self.configDlg.ui.chkCustomXaxis.setChecked(self.bCustomXaxis)
self.configDlg.ui.spinXmin.setValue(self.xLimitLeft)
self.configDlg.ui.spinXmax.setValue(self.xLimitRight)
self.changeCustomBinWidth()
self.changeCustomXaxis()
# legend position
if self.legendPos == 0:
			self.configDlg.ui.radioLegendPosBest.setChecked(True)
elif self.legendPos == 1:
self.configDlg.ui.radioLegendPosUpperRight.setChecked(True)
elif self.legendPos == 7:
self.configDlg.ui.radioLegendPosCentreRight.setChecked(True)
elif self.legendPos == 4:
self.configDlg.ui.radioLegendPosLowerRight.setChecked(True)
elif self.legendPos == 2:
self.configDlg.ui.radioLegendPosUpperLeft.setChecked(True)
elif self.legendPos == 6:
self.configDlg.ui.radioLegendPosCentreLeft.setChecked(True)
elif self.legendPos == 3:
self.configDlg.ui.radioLegendPosLowerLeft.setChecked(True)
else:
self.configDlg.ui.radioLegendPosNone.setChecked(True)
if self.configDlg.exec_() == QtGui.QDialog.Accepted:
self.figWidth = self.configDlg.ui.spinFigWidth.value()
self.figHeight = self.configDlg.ui.spinFigHeight.value()
self.bCustomBinWidth = self.configDlg.ui.chkCustomBinWidth.isChecked()
self.binWidth = self.configDlg.ui.spinBinWidth.value()
self.yAxisLogScale = self.configDlg.ui.chkLogScale.isChecked()
self.bCustomXaxis = self.configDlg.ui.chkCustomXaxis.isChecked()
self.xLimitLeft = self.configDlg.ui.spinXmin.value()
self.xLimitRight = self.configDlg.ui.spinXmax.value()
# legend position
if self.configDlg.ui.radioLegendPosBest.isChecked() == True:
self.legendPos = 0
elif self.configDlg.ui.radioLegendPosUpperRight.isChecked() == True:
self.legendPos = 1
elif self.configDlg.ui.radioLegendPosCentreRight.isChecked() == True:
self.legendPos = 7
elif self.configDlg.ui.radioLegendPosLowerRight.isChecked() == True:
self.legendPos = 4
elif self.configDlg.ui.radioLegendPosUpperLeft.isChecked() == True:
self.legendPos = 2
elif self.configDlg.ui.radioLegendPosCentreLeft.isChecked() == True:
self.legendPos = 6
elif self.configDlg.ui.radioLegendPosLowerLeft.isChecked() == True:
self.legendPos = 3
else:
self.legendPos = -1
self.settings.setValue(self.name + '/width', self.figWidth)
self.settings.setValue(self.name + '/height', self.figHeight)
self.settings.setValue(self.name + '/custom bin width', self.bCustomBinWidth)
self.settings.setValue(self.name + '/bin width', self.binWidth)
self.settings.setValue(self.name + '/log scale', self.yAxisLogScale)
self.settings.setValue(self.name + '/custom x-axis extents', self.bCustomXaxis)
self.settings.setValue(self.name + '/min value', self.xLimitLeft)
self.settings.setValue(self.name + '/max value', self.xLimitRight)
self.settings.setValue(self.name + '/legend position', self.legendPos)
self.plot(profile, statsResults)
def changeCustomBinWidth(self):
self.configDlg.ui.spinBinWidth.setEnabled(self.configDlg.ui.chkCustomBinWidth.isChecked())
def changeCustomXaxis(self):
self.configDlg.ui.spinXmin.setEnabled(self.configDlg.ui.chkCustomXaxis.isChecked())
self.configDlg.ui.spinXmax.setEnabled(self.configDlg.ui.chkCustomXaxis.isChecked())
def setXaxisMin(self):
seqs1 = self.profile.getSequenceCounts(0)
seqs2 = self.profile.getSequenceCounts(1)
self.configDlg.ui.spinXmin.setValue(min(min(seqs1), min(seqs2)))
def setXaxisMax(self):
seqs1 = self.profile.getSequenceCounts(0)
seqs2 = self.profile.getSequenceCounts(1)
self.configDlg.ui.spinXmax.setValue(max(max(seqs1), max(seqs2)))
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
testWindow = TestWindow(SeqHistogram)
testWindow.show()
sys.exit(app.exec_())
| dparks1134/STAMP | stamp/plugins/samples/plots/SeqHistogram.py | Python | gpl-3.0 | 9,822 | 0.032376 |
# Copyright (c) 2012-2016 Seafile Ltd.
import seahub.settings as settings
# OfficeOnlineServer, OnlyOffice, CollaboraOffice
OFFICE_SERVER_TYPE = getattr(settings, 'OFFICE_SERVER_TYPE', '')
OFFICE_WEB_APP_BASE_URL = getattr(settings, 'OFFICE_WEB_APP_BASE_URL', '')
WOPI_ACCESS_TOKEN_EXPIRATION = getattr(settings, 'WOPI_ACCESS_TOKEN_EXPIRATION', 12 * 60 * 60)
OFFICE_WEB_APP_DISCOVERY_EXPIRATION = getattr(settings, 'OFFICE_WEB_APP_DISCOVERY_EXPIRATION', 7 * 24 * 60 * 60)
ENABLE_OFFICE_WEB_APP = getattr(settings, 'ENABLE_OFFICE_WEB_APP', False)
OFFICE_WEB_APP_FILE_EXTENSION = getattr(settings, 'OFFICE_WEB_APP_FILE_EXTENSION', ())
ENABLE_OFFICE_WEB_APP_EDIT = getattr(settings, 'ENABLE_OFFICE_WEB_APP_EDIT', False)
OFFICE_WEB_APP_EDIT_FILE_EXTENSION = getattr(settings, 'OFFICE_WEB_APP_EDIT_FILE_EXTENSION', ())
## Client certificates ##
# path to client.cert when use client authentication
OFFICE_WEB_APP_CLIENT_CERT = getattr(settings, 'OFFICE_WEB_APP_CLIENT_CERT', '')
# path to client.key when use client authentication
OFFICE_WEB_APP_CLIENT_KEY = getattr(settings, 'OFFICE_WEB_APP_CLIENT_KEY', '')
# path to client.pem when use client authentication
OFFICE_WEB_APP_CLIENT_PEM = getattr(settings, 'OFFICE_WEB_APP_CLIENT_PEM', '')
## Server certificates ##
# Path to a CA_BUNDLE file or directory with certificates of trusted CAs
OFFICE_WEB_APP_SERVER_CA = getattr(settings, 'OFFICE_WEB_APP_SERVER_CA', True)
| miurahr/seahub | seahub/wopi/settings.py | Python | apache-2.0 | 1,424 | 0.005618 |
import sys
import signal
from threading import Thread
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
class PUTHandler(BaseHTTPRequestHandler):
def do_PUT(self):
print "----- SOMETHING WAS PUT!! ------"
print self.headers
length = int(self.headers['Content-Length'])
content = self.rfile.read(length)
self.send_response(200)
print content
def run_on(port):
print("Starting a server on port %i" % port)
server_address = ('localhost', port)
httpd = HTTPServer(server_address, PUTHandler)
httpd.serve_forever()
if __name__ == "__main__":
server = Thread(target=run_on, args=[81])
server.daemon = True # Do not make us wait for you to exit
server.start()
signal.pause() # Wait for interrupt signal, e.g. KeyboardInterrupt
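
# Hedged usage example: with the server running on port 81 (see run_on above),
# a request can be issued from another shell; the URL path is arbitrary
# because do_PUT ignores it.
#
#   curl -X PUT -d 'hello' http://localhost:81/anything
#
# The request headers and body are echoed on this server's stdout.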
| fp7-netide/Tools | traffem/apps/http/serverPUT.py | Python | epl-1.0 | 819 | 0.006105 |
"""
Support for Xiaomi Mi WiFi Repeater 2.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/device_tracker.xiaomi_miio/
"""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import (DOMAIN, PLATFORM_SCHEMA,
DeviceScanner)
from homeassistant.const import (CONF_HOST, CONF_TOKEN)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_TOKEN): vol.All(cv.string, vol.Length(min=32, max=32)),
})
REQUIREMENTS = ['python-miio==0.4.1', 'construct==2.9.41']
def get_scanner(hass, config):
"""Return a Xiaomi MiIO device scanner."""
from miio import WifiRepeater, DeviceException
scanner = None
host = config[DOMAIN].get(CONF_HOST)
token = config[DOMAIN].get(CONF_TOKEN)
_LOGGER.info(
"Initializing with host %s (token %s...)", host, token[:5])
try:
device = WifiRepeater(host, token)
device_info = device.info()
_LOGGER.info("%s %s %s detected",
device_info.model,
device_info.firmware_version,
device_info.hardware_version)
scanner = XiaomiMiioDeviceScanner(device)
except DeviceException as ex:
_LOGGER.error("Device unavailable or token incorrect: %s", ex)
return scanner
class XiaomiMiioDeviceScanner(DeviceScanner):
"""This class queries a Xiaomi Mi WiFi Repeater."""
def __init__(self, device):
"""Initialize the scanner."""
self.device = device
async def async_scan_devices(self):
"""Scan for devices and return a list containing found device ids."""
from miio import DeviceException
devices = []
try:
station_info = await self.hass.async_add_job(self.device.status)
_LOGGER.debug("Got new station info: %s", station_info)
for device in station_info.associated_stations:
devices.append(device['mac'])
except DeviceException as ex:
_LOGGER.error("Got exception while fetching the state: %s", ex)
return devices
async def async_get_device_name(self, device):
"""Return None.
The repeater doesn't provide the name of the associated device.
"""
return None
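
# Hedged example configuration (placeholder host and token; the token must be
# exactly 32 characters, per PLATFORM_SCHEMA above):
#
# device_tracker:
#   - platform: xiaomi_miio
#     host: 192.168.1.20
#     token: 0123456789abcdef0123456789abcdef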
| persandstrom/home-assistant | homeassistant/components/device_tracker/xiaomi_miio.py | Python | apache-2.0 | 2,487 | 0 |
# ----------------------------------------------------------------------------
# Copyright 2015 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
from neon.layers.layer import (Linear, Bias, Affine, Conv, Convolution, GeneralizedCost, Dropout,
Pooling, Activation, BatchNorm, BatchNormAutodiff,
Deconv, GeneralizedCostMask)
from neon.layers.merge import Merge, MergeSum, MergeConcat, MergeConcatSequence
from neon.layers.recurrent import Recurrent, LSTM, GRU
| chetan51/neon | neon/layers/__init__.py | Python | apache-2.0 | 1,116 | 0.001792 |
from datetime import datetime
from google.appengine.ext import db
from django.utils import simplejson as json
from couch import errors
from couch.models.util import gen_uuid
class DocumentRoot(db.Model):
''' Controls document '''
revno = db.IntegerProperty(default = 0)
revsuffix = db.StringProperty()
deleted = db.BooleanProperty(default = False)
def rev(self):
return '%s-%s' % (self.revno, self.revsuffix)
class Document(db.Model):
id = db.StringProperty()
rev = db.StringProperty()
dbname = db.StringProperty()
docstring = db.TextProperty()
deleted = db.BooleanProperty(default = False)
def to_dict(self):
return json.loads(self.docstring)
| yssk22/gaecouch | couch/models/document.py | Python | apache-2.0 | 708 | 0.011299 |
import sys
from cyvcf2 import VCF
from collections import defaultdict
def parse_caller_vcfs(sample_dict, caller_list):
caller_vcf_records = defaultdict(lambda: dict())
for caller in caller_list:
parse_vcf(sample_dict[caller], caller, caller_vcf_records)
return caller_vcf_records
def parse_vcf(vcf_file, caller, caller_vcf_records):
sys.stdout.write("Reading {}\n".format(vcf_file))
vcf = VCF(vcf_file)
for record in vcf:
if len(record.ALT) > 1:
sys.stderr.write("ERROR: More than one alternative allele detected in file "
"{}\n Record: {}\n".format(vcf_file, record))
sys.exit()
key = ("chr{}".format(record.CHROM), int(record.start), int(record.end), record.REF,
record.ALT[0])
caller_vcf_records[caller][key] = record
def parse_mutect_vcf_record(record):
# Pseudocount. Encountered a division by zero issue in at least one mutect record
depth = int(record.gt_depths[0])
if depth < 1:
depth = 1
info = {'DP': str(depth),
'FILTER': str(record.FILTER),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(depth))}
return info
def parse_vardict_vcf_record(record):
info = {'DP': str(record.INFO.get('DP')),
'VD': str(record.INFO.get('VD')),
'AF': str(record.INFO.get('AF')),
'FILTER': str(record.FILTER),
'BIAS': str(record.INFO.get('BIAS')),
'REFBIAS': str(record.INFO.get('REFBIAS')),
'VARBIAS': str(record.INFO.get('VARBIAS')),
'QUAL': str(record.INFO.get('QUAL')),
'QSTD': str(record.INFO.get('QSTD')),
'SBF': str(record.INFO.get('SBF')),
'ODDRATIO': str(record.INFO.get('ODDRATIO')),
'MQ': str(record.INFO.get('MQ')),
'SN': str(record.INFO.get('SN')),
'HIAF': str(record.INFO.get('HIAF')),
'ADJAF': str(record.INFO.get('ADJAF')),
'MSI': str(record.INFO.get('MSI')),
'MSILEN': str(record.INFO.get('MSILEN')),
'SHIFT3': str(record.INFO.get('SHIFT3')),
'NM': str(record.INFO.get('NM')),
'GDAMP': str(record.INFO.get('GDAMP')),
'LSEQ': str(record.INFO.get('LSEQ')),
'RSEQ': str(record.INFO.get('RSEQ')),
'TLAMP': str(record.INFO.get('TLAMP')),
'NCAMP': str(record.INFO.get('NCAMP')),
'AMPFLAG': str(record.INFO.get('AMPFLAG')),
'HICNT': str(record.INFO.get('HICNT')),
'HICOV': str(record.INFO.get('HICOV')),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(record.gt_depths[0]))}
return info
def parse_freebayes_vcf_record(record):
info = {'DP': str(record.INFO.get('DP')),
'AF': str(record.INFO.get('AF')),
'FILTER': str(record.FILTER),
'AC': str(record.INFO.get('AC')),
'RO': str(record.INFO.get('RO')),
'AO': str(record.INFO.get('AO')),
'PRO': str(record.INFO.get('PRO')),
'PAO': str(record.INFO.get('PAO')),
'QR': str(record.INFO.get('QR')),
'QA': str(record.INFO.get('QA')),
'PQR': str(record.INFO.get('PQR')),
'PQA': str(record.INFO.get('PQA')),
'SRF': str(record.INFO.get('SRF')),
'SRR': str(record.INFO.get('SRR')),
'SAF': str(record.INFO.get('SAF')),
'SAR': str(record.INFO.get('SAR')),
'SRP': str(record.INFO.get('SRP')),
'SAP': str(record.INFO.get('SAP')),
'AB': str(record.INFO.get('AB')),
'ABP': str(record.INFO.get('ABP')),
'RUN': str(record.INFO.get('RUN')),
'RPP': str(record.INFO.get('RPP')),
'RPPR': str(record.INFO.get('RPPR')),
'RPL': str(record.INFO.get('RPL')),
'RPR': str(record.INFO.get('RPR')),
'EPP': str(record.INFO.get('EPP')),
'EPPR': str(record.INFO.get('EPPR')),
'DRPA': str(record.INFO.get('DRPA')),
'ODDS': str(record.INFO.get('ODDS')),
'GTI': str(record.INFO.get('GTI')),
'TYPE': str(record.INFO.get('TYPE')),
'CIGAR': str(record.INFO.get('CIGAR')),
'NUMALT': str(record.INFO.get('NUMALT')),
'MEANALT': str(record.INFO.get('MEANALT')),
'LEN': str(record.INFO.get('LEN')),
'MQM': str(record.INFO.get('MQM')),
'MQMR': str(record.INFO.get('MQMR')),
'PAIRED': str(record.INFO.get('PAIRED')),
'PAIREDR': str(record.INFO.get('PAIREDR')),
'GTF_DP': str(record.gt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.INFO.get('AO')) / float(record.gt_depths[0]))}
return info
def parse_scalpel_vcf_record(record):
info = {'DP': str(record.gt_depths[0]),
'AVGCOV': str(record.INFO.get('AVGCOV')),
'MINCOV': str(record.INFO.get('MINCOV')),
'ALTCOV': str(record.INFO.get('ALTCOV')),
'COVRATIO': str(record.INFO.get('COVRATIO')),
'FILTER': str(record.FILTER),
'ZYG': str(record.INFO.get('ZYG')),
'CHI2': str(record.INFO.get('CHI2')),
'FISHERPHREDSCORE': str(record.INFO.get('FISHERPHREDSCORE')),
'INH': str(record.INFO.get('INH')),
'BESTSTATE': str(record.INFO.get('BESTSTATE')),
'COVSTATE': str(record.INFO.get('COVSTATE')),
'SOMATIC': str(record.INFO.get('SOMATIC')),
'DENOVO': str(record.INFO.get('DENOVO')),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': str(float(record.gt_alt_depths[0]) / float(record.gt_depths[0]))}
return info
def parse_platypus_vcf_record(record):
multi_allelic = record.INFO.get('OLD_MULTIALLELIC') or False
if multi_allelic:
tr = record.INFO.get('TR')[0]
else:
tr = record.INFO.get('TR')
if float(record.INFO.get('TC')) < 1:
aaf = "0"
else:
aaf = str(float(tr) / float(record.INFO.get('TC')))
info = {'DP': str(tr),
'FR': str(record.INFO.get('FR')),
'MMLQ': str(record.INFO.get('MMLQ')),
'TCR': str(record.INFO.get('TCR')),
'HP': str(record.INFO.get('HP')),
'WE': str(record.INFO.get('WE')),
'WS': str(record.INFO.get('WS')),
'FS': str(record.INFO.get('FS')),
'TR': str(tr),
'NF': str(record.INFO.get('NF')),
'TCF': str(record.INFO.get('TCF')),
'NR': str(record.INFO.get('NR')),
'TC': str(record.INFO.get('TC')),
'END': str(record.INFO.get('END')),
'MGOF': str(record.INFO.get('MGOF')),
'SbPval': str(record.INFO.get('SbPval')),
'START': str(record.INFO.get('START')),
'ReadPosRankSum': str(record.INFO.get('ReadPosRankSum')),
'MQ': str(record.INFO.get('MQ')),
'QD': str(record.INFO.get('QD')),
'SC': str(record.INFO.get('SC')),
'BRF': str(record.INFO.get('BRF')),
'HapScore': str(record.INFO.get('HapScore')),
'FILTER': str(record.FILTER),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None,
'AAF': aaf}
return info
def parse_pindel_vcf_record(record):
info = {'DP': str(record.gt_depths[0]),
'END': str(record.INFO.get('END')),
'HOMLEN': str(record.INFO.get('HOMLEN')),
'HOMSEQ': str(record.INFO.get('HOMSEQ')),
'SVLEN': str(record.INFO.get('SVLEN')),
'SVTYPE': str(record.INFO.get('SVTYPE')),
'NTLEN': str(record.INFO.get('NTLEN')),
'GTF_DP': str(record.gt_depths[0]),
'GTF_AD': str(record.gt_alt_depths[0]),
'AAF': str(float(record.gt_alt_depths[0]) / float(record.gt_depths[0])),
'FILTER': str(record.FILTER),
'MULTIALLELIC': str(record.INFO.get('OLD_MULTIALLELIC')) or None}
return info
def var_is_rare(variant_data, threshold):
"""Check if variant is rare, as defined by the passed cutoff
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:param threshold: Allele frequency rarity threshold.
:type threshold: float.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('in_esp') != 0 or variant_data.INFO.get('in_1kg') != 0 or variant_data.INFO.get('in_exac') != 0:
if variant_data.INFO.get('max_aaf_all') > threshold:
return False
else:
return True
else:
return True
def var_is_in_cosmic(variant_data):
"""Check if variant is in the COSMIC database
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('cosmic_ids') is not None:
return True
else:
return False
def var_is_in_clinvar(variant_data):
"""Check if variant is in the ClinVar database
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('clinvar_sig') is not None:
return True
else:
return False
def var_is_pathogenic(variant_data):
"""Check if variant is listed as pathogenic in ClinVar
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('clinvar_sig') is not None:
if "pathogenic" in variant_data.INFO.get('clinvar_sig'):
return True
else:
return False
else:
return False
def var_is_protein_effecting(variant_data):
"""Check if variant has a MED or HIGH impact
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('impact_severity') != "LOW":
return True
else:
return False
def var_in_gene(variant_data, genes):
"""Check if variant has a gene name associated with it
:param variant_data: A GeminiRow for a single variant.
:type variant_data: GeminiRow.
:returns: bool -- True or False.
"""
if variant_data.INFO.get('gene') in genes:
return True
else:
return False
def var_is_lof(variant_data):
if variant_data.INFO.get('is_lof'):
return True
else:
return False
def var_is_coding(variant_data):
if variant_data.INFO.get('is_coding'):
return True
else:
return False
def var_is_splicing(variant_data):
if variant_data.INFO.get('is_splicing'):
return True
else:
return False
def parse_rs_ids(variant_data):
if variant_data.INFO.get('rs_ids') is not None:
return variant_data.INFO.get('rs_ids').split(',')
else:
return []
def parse_cosmic_ids(variant_data):
if variant_data.INFO.get('cosmic_ids') is not None:
return variant_data.INFO.get('cosmic_ids').split(',')
else:
return []
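
# Hedged usage sketch (not part of the original module): wiring the parsers
# above into a minimal filtering pass over one caller's records.  The file
# path is hypothetical, and var_is_rare/var_is_protein_effecting assume the
# records carry GEMINI-style annotations (in_esp, max_aaf_all, ...).
if __name__ == '__main__':
    sample = {'mutect': 'sample.mutect.vcf.gz'}  # hypothetical path
    records = parse_caller_vcfs(sample, ['mutect'])
    for key, record in records['mutect'].items():
        info = parse_mutect_vcf_record(record)
        if var_is_rare(record, 0.01) and var_is_protein_effecting(record):
            sys.stdout.write("{}\t{}\n".format(key, info['AAF']))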
| GastonLab/ddb-mongodb | vcf_parsing.py | Python | mit | 11,779 | 0.000679 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG for Google Cloud Dataflow service
"""
import os
from airflow import models
from airflow.providers.google.cloud.operators.dataflow import DataflowStartSqlJobOperator
from airflow.utils.dates import days_ago
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
BQ_SQL_DATASET = os.environ.get("DATAFLOW_BQ_SQL_DATASET", "airflow_dataflow_samples")
BQ_SQL_TABLE_INPUT = os.environ.get("BQ_SQL_TABLE_INPUT", "beam_input")
BQ_SQL_TABLE_OUTPUT = os.environ.get("BQ_SQL_TABLE_OUTPUT", "beam_output")
DATAFLOW_SQL_JOB_NAME = os.environ.get("DATAFLOW_SQL_JOB_NAME", "dataflow-sql")
DATAFLOW_SQL_LOCATION = os.environ.get("DATAFLOW_SQL_LOCATION", "us-west1")
with models.DAG(
dag_id="example_gcp_dataflow_sql",
start_date=days_ago(1),
schedule_interval=None, # Override to match your needs
tags=['example'],
) as dag_sql:
start_sql = DataflowStartSqlJobOperator(
task_id="start_sql_query",
job_name=DATAFLOW_SQL_JOB_NAME,
query=f"""
SELECT
sales_region as sales_region,
count(state_id) as count_state
FROM
bigquery.table.`{GCP_PROJECT_ID}`.`{BQ_SQL_DATASET}`.`{BQ_SQL_TABLE_INPUT}`
WHERE state_id >= @state_id_min
GROUP BY sales_region;
""",
options={
"bigquery-project": GCP_PROJECT_ID,
"bigquery-dataset": BQ_SQL_DATASET,
"bigquery-table": BQ_SQL_TABLE_OUTPUT,
"bigquery-write-disposition": "write-truncate",
"parameter": "state_id_min:INT64:2",
},
location=DATAFLOW_SQL_LOCATION,
do_xcom_push=True,
)
| airbnb/airflow | airflow/providers/google/cloud/example_dags/example_dataflow_sql.py | Python | apache-2.0 | 2,471 | 0.000809 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that the --srcdir option works to fetch things from a repository.
"""
import TestSCons
test = TestSCons.TestSCons()
test.subdir('repository', 'work1')
repository = test.workpath('repository')
test.write(['repository', 'SConstruct'], r"""
env = Environment()
env.Command('file.out', 'file.in', Copy('$TARGET', '$SOURCE'))
""")
test.write(['repository', 'file.in'], "repository/file.in\n")
opts = '--srcdir ' + repository
# Make the entire repository non-writable, so we'll detect
# if we try to write into it accidentally.
test.writable('repository', 0)
test.run(chdir = 'work1', options = opts, arguments = '.')
test.must_match(['work1', 'file.out'], "repository/file.in\n")
test.up_to_date(chdir = 'work1', options = opts, arguments = '.')
#
test.pass_test()
| datalogics/scons | test/option/srcdir.py | Python | mit | 1,950 | 0.006667 |
"""
Q4- Write a Python function, odd, that takes in one number and returns True when the number is odd and False otherwise. You should use the % (mod) operator, not if. This function takes in one number and returns a boolean
"""
def odd( number ):
return number % 2 == 1
number = int( input( "Enter a number: ") )
print( "Is the number " + str( number ) + " odd? Answer: " + str( odd( number) ) )
| SuyashD95/python-assignments | Assignment 3/odd.py | Python | mit | 403 | 0.044665 |
import contextlib
import gzip
import hashlib
import io
import mmap
from builtins import (
map as imap,
)
def gzip_compress(data, compresslevel=6):
compressed = io.BytesIO()
with gzip.GzipFile(fileobj=compressed,
mode="wb",
compresslevel=compresslevel) as compressor:
compressor.write(data)
return compressed.getvalue()
def hash_file(fn, hn):
h = hashlib.new(hn)
with open(fn, "r") as fh:
with contextlib.closing(mmap.mmap(fh.fileno(), 0, prot=mmap.PROT_READ)) as mm:
h.update(mm)
return h.digest()
def indent(text, spaces):
spaces = " " * int(spaces)
return "\n".join(imap(lambda l: spaces + l, text.splitlines()))
| nanshe-org/nanshe_workflow | nanshe_workflow/util.py | Python | apache-2.0 | 733 | 0.001364 |
from .views import PageView
from django.urls import path
urlpatterns = [
path('<path:path>/', PageView.as_view(), name='page_detail'),
]
| manhhomienbienthuy/pythondotorg | pages/urls.py | Python | apache-2.0 | 142 | 0 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/creature/npc/droid/crafted/shared_droideka_advanced.iff"
result.attribute_template_id = 3
result.stfName("droid_name","droideka_crafted_advanced")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
	return result
| anhstudios/swganh | data/scripts/templates/object/creature/npc/droid/crafted/shared_droideka_advanced.py | Python | mit | 474 | 0.046414 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from ella.core.models import Publishable
class XArticle(Publishable):
"""
``XArticle`` is extra publishable descendant for testing.
Is used for possibility testing descendants of publishable
with different content type
"""
content = models.TextField(_('Content'), default='')
class Meta:
verbose_name = _('XArticle')
verbose_name_plural = _('XArticles')
| MichalMaM/ella | test_ella/test_app/models.py | Python | bsd-3-clause | 487 | 0 |
from django import forms
from lists.models import Item
EMPTY_LIST_ERROR = "You can't have an empty list item"
class ItemForm(forms.models.ModelForm):
class Meta:
model = Item
fields = ('text',)
widgets = {
'text': forms.fields.TextInput(attrs={
'placeholder': 'Enter a to-do item',
'class': 'form-control input-lg'
}),
}
error_messages = {
            'text': {'required': EMPTY_LIST_ERROR}
        }
| rmelchorv/TDD-Cuervos | lists/forms.py | Python | mit | 435 | 0.034483 |
# -*- coding: utf-8 -*-
"""
logbook._fallback
~~~~~~~~~~~~~~~~~
Fallback implementations in case speedups is not around.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
from itertools import count
from logbook.helpers import get_iterator_next_method
from logbook.concurrency import (thread_get_ident, greenlet_get_ident,
thread_local, greenlet_local,
ThreadLock, GreenletRLock, is_gevent_enabled)
_missing = object()
_MAX_CONTEXT_OBJECT_CACHE = 256
def group_reflected_property(name, default, fallback=_missing):
"""Returns a property for a given name that falls back to the
value of the group if set. If there is no such group, the
provided default is used.
"""
def _get(self):
rv = getattr(self, '_' + name, _missing)
if rv is not _missing and rv != fallback:
return rv
if self.group is None:
return default
return getattr(self.group, name)
def _set(self, value):
setattr(self, '_' + name, value)
def _del(self):
delattr(self, '_' + name)
return property(_get, _set, _del)
class _StackBound(object):
def __init__(self, obj, push, pop):
self.__obj = obj
self.__push = push
self.__pop = pop
def __enter__(self):
self.__push()
return self.__obj
def __exit__(self, exc_type, exc_value, tb):
self.__pop()
class StackedObject(object):
"""Baseclass for all objects that provide stack manipulation
operations.
"""
def push_greenlet(self):
"""Pushes the stacked object to the greenlet stack."""
raise NotImplementedError()
def pop_greenlet(self):
"""Pops the stacked object from the greenlet stack."""
raise NotImplementedError()
def push_thread(self):
"""Pushes the stacked object to the thread stack."""
raise NotImplementedError()
def pop_thread(self):
"""Pops the stacked object from the thread stack."""
raise NotImplementedError()
def push_application(self):
"""Pushes the stacked object to the application stack."""
raise NotImplementedError()
def pop_application(self):
"""Pops the stacked object from the application stack."""
raise NotImplementedError()
def __enter__(self):
if is_gevent_enabled():
self.push_greenlet()
else:
self.push_thread()
return self
def __exit__(self, exc_type, exc_value, tb):
if is_gevent_enabled():
self.pop_greenlet()
else:
self.pop_thread()
def greenletbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the greenlet.
"""
return _cls(self, self.push_greenlet, self.pop_greenlet)
def threadbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the thread.
"""
return _cls(self, self.push_thread, self.pop_thread)
def applicationbound(self, _cls=_StackBound):
"""Can be used in combination with the `with` statement to
execute code while the object is bound to the application.
"""
return _cls(self, self.push_application, self.pop_application)
class ContextStackManager(object):
"""Helper class for context objects that manages a stack of
objects.
"""
def __init__(self):
self._global = []
self._thread_context_lock = ThreadLock()
self._thread_context = thread_local()
self._greenlet_context_lock = GreenletRLock()
self._greenlet_context = greenlet_local()
self._cache = {}
self._stackop = get_iterator_next_method(count())
def iter_context_objects(self):
"""Returns an iterator over all objects for the combined
application and context cache.
"""
use_gevent = is_gevent_enabled()
tid = greenlet_get_ident() if use_gevent else thread_get_ident()
objects = self._cache.get(tid)
if objects is None:
if len(self._cache) > _MAX_CONTEXT_OBJECT_CACHE:
self._cache.clear()
objects = self._global[:]
objects.extend(getattr(self._thread_context, 'stack', ()))
if use_gevent:
objects.extend(getattr(self._greenlet_context, 'stack', ()))
objects.sort(reverse=True)
objects = [x[1] for x in objects]
self._cache[tid] = objects
return iter(objects)
def push_greenlet(self, obj):
self._greenlet_context_lock.acquire()
try:
self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
item = (self._stackop(), obj)
stack = getattr(self._greenlet_context, 'stack', None)
if stack is None:
self._greenlet_context.stack = [item]
else:
stack.append(item)
finally:
self._greenlet_context_lock.release()
def pop_greenlet(self):
self._greenlet_context_lock.acquire()
try:
self._cache.pop(greenlet_get_ident(), None) # remote chance to conflict with thread ids
stack = getattr(self._greenlet_context, 'stack', None)
assert stack, 'no objects on stack'
return stack.pop()[1]
finally:
self._greenlet_context_lock.release()
def push_thread(self, obj):
self._thread_context_lock.acquire()
try:
self._cache.pop(thread_get_ident(), None)
item = (self._stackop(), obj)
stack = getattr(self._thread_context, 'stack', None)
if stack is None:
self._thread_context.stack = [item]
else:
stack.append(item)
finally:
self._thread_context_lock.release()
def pop_thread(self):
self._thread_context_lock.acquire()
try:
self._cache.pop(thread_get_ident(), None)
stack = getattr(self._thread_context, 'stack', None)
assert stack, 'no objects on stack'
return stack.pop()[1]
finally:
self._thread_context_lock.release()
def push_application(self, obj):
self._global.append((self._stackop(), obj))
self._cache.clear()
def pop_application(self):
assert self._global, 'no objects on application stack'
popped = self._global.pop()[1]
self._cache.clear()
return popped
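
# Hedged usage sketch (illustrative only, not part of logbook's public API):
# a StackedObject subclass typically delegates its push/pop hooks to a shared
# ContextStackManager, after which the threadbound()/applicationbound()
# helpers above work unchanged.
_example_manager = ContextStackManager()

class _ExampleContext(StackedObject):
    def push_thread(self):
        _example_manager.push_thread(self)

    def pop_thread(self):
        _example_manager.pop_thread()

    def push_greenlet(self):
        _example_manager.push_greenlet(self)

    def pop_greenlet(self):
        _example_manager.pop_greenlet()

    def push_application(self):
        _example_manager.push_application(self)

    def pop_application(self):
        _example_manager.pop_application()

# with _ExampleContext().threadbound():
#     ...  # iter_context_objects() now yields the bound object first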
| agustinhenze/logbook.debian | logbook/_fallback.py | Python | bsd-3-clause | 6,767 | 0.000591 |
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Usage:
# python bpacking_simd_codegen.py 128 > bpacking_simd128_generated.h
# python bpacking_simd_codegen.py 256 > bpacking_simd256_generated.h
# python bpacking_simd_codegen.py 512 > bpacking_simd512_generated.h
from functools import partial
import sys
from textwrap import dedent, indent
class UnpackGenerator:
def __init__(self, simd_width):
self.simd_width = simd_width
if simd_width % 32 != 0:
raise("SIMD bit width should be a multiple of 32")
self.simd_byte_width = simd_width // 8
def print_unpack_bit0_func(self):
print(
"inline static const uint32_t* unpack0_32(const uint32_t* in, uint32_t* out) {")
print(" memset(out, 0x0, 32 * sizeof(*out));")
print(" out += 32;")
print("")
print(" return in;")
print("}")
def print_unpack_bit32_func(self):
print(
"inline static const uint32_t* unpack32_32(const uint32_t* in, uint32_t* out) {")
print(" memcpy(out, in, 32 * sizeof(*out));")
print(" in += 32;")
print(" out += 32;")
print("")
print(" return in;")
print("}")
def print_unpack_bit_func(self, bit):
def p(code):
print(indent(code, prefix=' '))
shift = 0
shifts = []
in_index = 0
inls = []
mask = (1 << bit) - 1
bracket = "{"
print(f"inline static const uint32_t* unpack{bit}_32(const uint32_t* in, uint32_t* out) {{")
p(dedent(f"""\
uint32_t mask = 0x{mask:0x};
simd_batch masks(mask);
simd_batch words, shifts;
simd_batch results;
"""))
def safe_load(index):
return f"SafeLoad<uint32_t>(in + {index})"
for i in range(32):
if shift + bit == 32:
shifts.append(shift)
inls.append(safe_load(in_index))
in_index += 1
shift = 0
elif shift + bit > 32: # cross the boundary
inls.append(
f"{safe_load(in_index)} >> {shift} | {safe_load(in_index + 1)} << {32 - shift}")
in_index += 1
shift = bit - (32 - shift)
shifts.append(0) # zero shift
else:
shifts.append(shift)
inls.append(safe_load(in_index))
shift += bit
bytes_per_batch = self.simd_byte_width
words_per_batch = bytes_per_batch // 4
one_word_template = dedent("""\
words = simd_batch{{ {words} }};
shifts = simd_batch{{ {shifts} }};
results = (words >> shifts) & masks;
results.store_unaligned(out);
out += {words_per_batch};
""")
for start in range(0, 32, words_per_batch):
            stop = start + words_per_batch
p(f"""// extract {bit}-bit bundles {start} to {stop - 1}""")
p(one_word_template.format(
words=", ".join(inls[start:stop]),
shifts=", ".join(map(str, shifts[start:stop])),
words_per_batch=words_per_batch))
p(dedent(f"""\
in += {bit};
return in;"""))
print("}")
def print_copyright():
print(dedent("""\
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
"""))
def print_note():
print("// Automatically generated file; DO NOT EDIT.")
print()
def main(simd_width):
print_copyright()
print_note()
struct_name = f"UnpackBits{simd_width}"
# NOTE: templating the UnpackBits struct on the dispatch level avoids
# potential name collisions if there are several UnpackBits generations
# with the same SIMD width on a given architecture.
print(dedent(f"""\
#pragma once
#include <cstdint>
#include <cstring>
#include <xsimd/xsimd.hpp>
#include "arrow/util/dispatch.h"
#include "arrow/util/ubsan.h"
namespace arrow {{
namespace internal {{
namespace {{
using ::arrow::util::SafeLoad;
template <DispatchLevel level>
struct {struct_name} {{
using simd_batch = xsimd::make_sized_batch_t<uint32_t, {simd_width//32}>;
"""))
gen = UnpackGenerator(simd_width)
gen.print_unpack_bit0_func()
print()
for i in range(1, 32):
gen.print_unpack_bit_func(i)
print()
gen.print_unpack_bit32_func()
print()
print(dedent(f"""\
}}; // struct {struct_name}
}} // namespace
}} // namespace internal
}} // namespace arrow
"""))
if __name__ == '__main__':
usage = f"""Usage: {__file__} <SIMD bit-width>"""
if len(sys.argv) != 2:
raise ValueError(usage)
try:
simd_width = int(sys.argv[1])
except ValueError:
raise ValueError(usage)
main(simd_width)
| kou/arrow | cpp/src/arrow/util/bpacking_simd_codegen.py | Python | apache-2.0 | 6,630 | 0.001056 |
from SubChoosers.ISubStagesChooser import ISubStagesChooser
from SubRankers.ByFullNameSubStagesRanker import ByFullNameSubStagesRanker
from Utils import WriteDebug
class FirstInCertainSubStagesChooser(ISubStagesChooser):
""" Implementation of ISubStagesChooser. This chooser return results after
ranking them in the ByFullNameSubStagesRanker,
In practice, the chooser uses the ByFullNameSubStagesRanker ranker, and
return the first SubStages after the rank, regardless of the value in
first_is_certain.
"""
@classmethod
def chooseMovieSubStageFromMoviesSubStages(cls, movie_sub_stages, query):
""" Choose the first MovieSubStage after ranking the Results using the
most accurate ranker avaliable (ByFullNameSubStagesRanker).
The function will return MovieSubStage even if first_is_certain is
False for the first MovieSubStage that the ranker returned. Will
return None if movie_sub_stages is empty.
"""
movie_sub_stage = None
if movie_sub_stages:
WriteDebug('Got results in movie_sub_stages, sending them to the ranker')
            (movie_sub_stages, first_is_certain) = ByFullNameSubStagesRanker\
                .rankMovieSubStages(movie_sub_stages, query)
            WriteDebug("Ranker returned %s for first_is_certain, but we don't care" % first_is_certain)
movie_sub_stage = movie_sub_stages[0]
WriteDebug('MovieSubStage: %s' % movie_sub_stage.info())
else:
            WriteDebug('There are no results in movie_sub_stages, returning None')
return movie_sub_stage
@classmethod
def chooseVersionSubStageFromVersionSubStages\
(cls, version_sub_stages, movie_sub_stages, query):
""" Choose the first VersionSubStage after ranking the Results using
the most accurate ranker avaliable (ByFullNameSubStagesRanker).
The function will return VersionSubStage even if first_is_certain
is False for the first VersionSubStage that the ranker returned.
Will return None if version_sub_stages is empty.
"""
version_sub_stage = None
if version_sub_stages:
WriteDebug('Got Versions in version_sub_stages, sending them to the ranker')
            (version_sub_stages, first_is_certain) = ByFullNameSubStagesRanker\
                .rankVersionSubStages(version_sub_stages, query)
            WriteDebug("Ranker returned %s for first_is_certain, but we don't care" % first_is_certain)
version_sub_stage = version_sub_stages[0]
WriteDebug('VersionSubStage: %s' % version_sub_stage.info())
else:
            WriteDebug('There are no results in version_sub_stages, returning None')
return version_sub_stage
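
# Hedged usage sketch (not part of the original module): both choosers are
# classmethods, so a caller forwards its candidate lists directly.  The
# movie_sub_stages, version_sub_stages and query arguments are placeholders
# for whatever the surrounding search pipeline produced.
def _choose_best(movie_sub_stages, version_sub_stages, query):
    movie = FirstInCertainSubStagesChooser\
        .chooseMovieSubStageFromMoviesSubStages(movie_sub_stages, query)
    if movie is None:
        return None
    return FirstInCertainSubStagesChooser\
        .chooseVersionSubStageFromVersionSubStages(
            version_sub_stages, movie_sub_stages, query)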
| yosi-dediashvili/SubiT | src/SubChoosers/FirstInCertainSubStagesChooser.py | Python | gpl-3.0 | 2,895 | 0.005181 |
#!/usr/bin/env python
__all__ = ['acfun_download']
from ..common import *
from .letv import letvcloud_download_by_vu
from .qq import qq_download_by_vid
from .sina import sina_download_by_vid
from .tudou import tudou_download_by_iid
from .youku import youku_download_by_vid
import json, re
def get_srt_json(id):
url = 'http://danmu.aixifan.com/V2/%s' % id
return get_html(url)
def acfun_download_by_vid(vid, title, output_dir='.', merge=True, info_only=False, **kwargs):
info = json.loads(get_html('http://www.acfun.tv/video/getVideo.aspx?id=' + vid))
sourceType = info['sourceType']
if 'sourceId' in info: sourceId = info['sourceId']
# danmakuId = info['danmakuId']
if sourceType == 'sina':
sina_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'youku':
youku_download_by_vid(sourceId, title=title, output_dir=output_dir, merge=merge, info_only=info_only, **kwargs)
elif sourceType == 'tudou':
tudou_download_by_iid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'qq':
qq_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'letv':
letvcloud_download_by_vu(sourceId, '2d8c027396', title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'zhuzhan':
a = 'http://api.aixifan.com/plays/%s/realSource' % vid
s = json.loads(get_content(a, headers={'deviceType': '1'}))
urls = s['data']['files'][-1]['url']
size = urls_size(urls)
print_info(site_info, title, 'mp4', size)
if not info_only:
download_urls(urls, title, 'mp4', size,
output_dir=output_dir, merge=merge)
else:
raise NotImplementedError(sourceType)
if not info_only and not dry_run:
if not kwargs['caption']:
print('Skipping danmaku.')
return
try:
title = get_filename(title)
print('Downloading %s ...\n' % (title + '.cmt.json'))
cmt = get_srt_json(vid)
with open(os.path.join(output_dir, title + '.cmt.json'), 'w', encoding='utf-8') as x:
x.write(cmt)
except:
pass
def acfun_download(url, output_dir='.', merge=True, info_only=False, **kwargs):
assert re.match(r'http://[^\.]+.acfun.[^\.]+/\D/\D\D(\d+)', url)
html = get_html(url)
title = r1(r'<h1 id="txt-title-view">([^<>]+)<', html)
title = unescape_html(title)
title = escape_file_path(title)
assert title
videos = re.findall("data-vid=\"(\d+)\".*href=\"[^\"]+\".*title=\"([^\"]+)\"", html)
for video in videos:
p_vid = video[0]
p_title = title + " - " + video[1] if video[1] != '删除标签' else title
acfun_download_by_vid(p_vid, p_title,
output_dir=output_dir,
merge=merge,
info_only=info_only,
**kwargs)
site_info = "AcFun.tv"
download = acfun_download
download_playlist = playlist_not_supported('acfun')
| lilydjwg/you-get | src/you_get/extractors/acfun.py | Python | mit | 3,211 | 0.005308 |
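# Hedged usage sketch: how this extractor is typically driven. The URL is a
# made-up placeholder, and you-get's CLI normally initialises the helpers
# imported from ..common (get_html, download_urls, dry_run, ...) before an
# extractor runs, so calling it directly like this is illustrative only.
if __name__ == '__main__':
    acfun_download('http://www.acfun.tv/v/ac1234567',
                   output_dir='.', merge=True, info_only=True,
                   caption=False)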
# -*- coding: utf-8 -*-
#
# Slave, (c) 2014, see AUTHORS. Licensed under the GNU GPL.
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future.builtins import *
from slave.lakeshore import LS340, LS370
from slave.transport import SimulatedTransport
def test_ls340():
# Ensure instantiation does not raise
LS340(SimulatedTransport())
def test_ls370():
# Ensure instantiation does not raise
LS370(SimulatedTransport())
| therealpyro/slave | slave/test/test_lakeshore.py | Python | gpl-3.0 | 488 | 0 |
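# Hedged sketch: the two smoke tests above could be collapsed into a single
# parametrized test. Nothing here is required by the suite; it just shows
# the idiomatic pytest spelling of the same checks.
import pytest
from slave.lakeshore import LS340, LS370
from slave.transport import SimulatedTransport

@pytest.mark.parametrize('driver', [LS340, LS370])
def test_instantiation_does_not_raise(driver):
    driver(SimulatedTransport())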
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, GeekChimp - Franck Nijhof <[email protected]>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: osx_defaults
author: Franck Nijhof (@frenck)
short_description: osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible
description:
- osx_defaults allows users to read, write, and delete Mac OS X user defaults from Ansible scripts.
Mac OS X applications and other programs use the defaults system to record user preferences and other
information that must be maintained when the applications aren't running (such as default font for new
documents, or the position of an Info panel).
version_added: "2.0"
options:
domain:
description:
- The domain is a domain name of the form com.companyname.appname.
required: false
default: NSGlobalDomain
host:
description:
- The host on which the preference should apply. The special value "currentHost" corresponds to the
"-currentHost" switch of the defaults commandline tool.
required: false
default: null
version_added: "2.1"
key:
description:
- The key of the user preference
required: true
type:
description:
- The type of value to write.
required: false
default: string
choices: [ "array", "bool", "boolean", "date", "float", "int", "integer", "string" ]
array_add:
description:
- Add new elements to the array for a key which has an array as its value.
required: false
default: false
choices: [ "true", "false" ]
value:
description:
- The value to write. Only required when state = present.
required: false
default: null
state:
description:
- The state of the user defaults
required: false
default: present
choices: [ "present", "absent" ]
notes:
- Mac OS X caches defaults. You may need to log out and log back in to apply the changes.
'''
EXAMPLES = '''
- osx_defaults:
domain: com.apple.Safari
key: IncludeInternalDebugMenu
type: bool
value: true
state: present
- osx_defaults:
domain: NSGlobalDomain
key: AppleMeasurementUnits
type: string
value: Centimeters
state: present
- osx_defaults:
domain: com.apple.screensaver
host: currentHost
key: showClock
type: int
value: 1
- osx_defaults:
key: AppleMeasurementUnits
type: string
value: Centimeters
- osx_defaults:
key: AppleLanguages
type: array
value:
- en
- nl
- osx_defaults:
domain: com.geekchimp.macable
key: ExampleKeyToRemove
state: absent
'''
import datetime
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
# exceptions --------------------------------------------------------------- {{{
class OSXDefaultsException(Exception):
pass
# /exceptions -------------------------------------------------------------- }}}
# class MacDefaults -------------------------------------------------------- {{{
class OSXDefaults(object):
""" Class to manage Mac OS user defaults """
# init ---------------------------------------------------------------- {{{
""" Initialize this module. Finds 'defaults' executable and preps the parameters """
def __init__(self, **kwargs):
# Initial var for storing current defaults value
self.current_value = None
# Just set all given parameters
for key, val in kwargs.items():
setattr(self, key, val)
# Try to find the defaults executable
self.executable = self.module.get_bin_path(
'defaults',
required=False,
opt_dirs=self.path.split(':'),
)
if not self.executable:
raise OSXDefaultsException("Unable to locate defaults executable.")
# When state is present, a value parameter is required
if self.state == "present" and self.value is None:
raise OSXDefaultsException("Missing value parameter")
# Ensure the value is the correct type
self.value = self._convert_type(self.type, self.value)
# /init --------------------------------------------------------------- }}}
# tools --------------------------------------------------------------- {{{
""" Converts value to given type """
def _convert_type(self, type, value):
if type == "string":
return str(value)
elif type in ["bool", "boolean"]:
if isinstance(value, basestring):
value = value.lower()
if value in [True, 1, "true", "1", "yes"]:
return True
elif value in [False, 0, "false", "0", "no"]:
return False
raise OSXDefaultsException("Invalid boolean value: {0}".format(repr(value)))
elif type == "date":
try:
return datetime.datetime.strptime(value.split("+")[0].strip(), "%Y-%m-%d %H:%M:%S")
except ValueError:
raise OSXDefaultsException(
"Invalid date value: {0}. Required format yyy-mm-dd hh:mm:ss.".format(repr(value))
)
elif type in ["int", "integer"]:
if not str(value).isdigit():
raise OSXDefaultsException("Invalid integer value: {0}".format(repr(value)))
return int(value)
elif type == "float":
try:
value = float(value)
except ValueError:
raise OSXDefaultsException("Invalid float value: {0}".format(repr(value)))
return value
elif type == "array":
if not isinstance(value, list):
raise OSXDefaultsException("Invalid value. Expected value to be an array")
return value
raise OSXDefaultsException('Type is not supported: {0}'.format(type))
""" Returns a normalized list of commandline arguments based on the "host" attribute """
def _host_args(self):
if self.host is None:
return []
elif self.host == 'currentHost':
return ['-currentHost']
else:
return ['-host', self.host]
""" Returns a list containing the "defaults" executable and any common base arguments """
def _base_command(self):
return [self.executable] + self._host_args()
""" Converts array output from defaults to an list """
@staticmethod
def _convert_defaults_str_to_list(value):
# Split output of defaults. Every line contains a value
value = value.splitlines()
# Remove first and last item, those are not actual values
value.pop(0)
value.pop(-1)
# Remove extra spaces and comma (,) at the end of values
value = [re.sub(',$', '', x.strip(' ')) for x in value]
return value
# /tools -------------------------------------------------------------- }}}
# commands ------------------------------------------------------------ {{{
""" Reads value of this domain & key from defaults """
def read(self):
# First try to find out the type
rc, out, err = self.module.run_command(self._base_command() + ["read-type", self.domain, self.key])
# If RC is 1, the key does not exist
if rc == 1:
return None
# Any other non-zero RC means the read failed
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key type from defaults: " + out)
# OK, let's parse the type from the output
type = out.strip().replace('Type is ', '')
# Now get the current value
rc, out, err = self.module.run_command(self._base_command() + ["read", self.domain, self.key])
# Strip output
out = out.strip()
# A non-zero RC at this point is unexpected
if rc != 0:
raise OSXDefaultsException("An error occurred while reading key value from defaults: " + out)
# Convert string to list when type is array
if type == "array":
out = self._convert_defaults_str_to_list(out)
# Store the current_value
self.current_value = self._convert_type(type, out)
""" Writes value to this domain & key to defaults """
def write(self):
# We need to convert some values so the defaults commandline understands it
if isinstance(self.value, bool):
if self.value:
value = "TRUE"
else:
value = "FALSE"
elif isinstance(self.value, (int, float)):
value = str(self.value)
elif self.array_add and self.current_value is not None:
value = list(set(self.value) - set(self.current_value))
elif isinstance(self.value, datetime.datetime):
value = self.value.strftime('%Y-%m-%d %H:%M:%S')
else:
value = self.value
# When the type is array and array_add is enabled, switch to the array-add type
if self.type == "array" and self.array_add:
self.type = "array-add"
# All values should be a list, for easy passing it to the command
if not isinstance(value, list):
value = [value]
rc, out, err = self.module.run_command(self._base_command() + ['write', self.domain, self.key, '-' + self.type] + value)
if rc != 0:
raise OSXDefaultsException('An error occurred while writing value to defaults: ' + out)
""" Deletes defaults key from domain """
def delete(self):
rc, out, err = self.module.run_command(self._base_command() + ['delete', self.domain, self.key])
if rc != 0:
raise OSXDefaultsException("An error occurred while deleting key from defaults: " + out)
# /commands ----------------------------------------------------------- }}}
# run ----------------------------------------------------------------- {{{
""" Does the magic! :) """
def run(self):
# Get the current value from defaults
self.read()
# Handle absent state
if self.state == "absent":
if self.current_value is None:
return False
if self.module.check_mode:
return True
self.delete()
return True
# Fail if the given type does not match the type stored in defaults
value_type = type(self.value)
if self.current_value is not None and not isinstance(self.current_value, value_type):
raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)
# Current value matches the given value. Nothing needs to be done. Arrays need extra care
if self.type == "array" and self.current_value is not None and not self.array_add and \
set(self.current_value) == set(self.value):
return False
elif self.type == "array" and self.current_value is not None and self.array_add and \
len(list(set(self.value) - set(self.current_value))) == 0:
return False
elif self.current_value == self.value:
return False
if self.module.check_mode:
return True
# Change/Create/Set given key/value for domain in defaults
self.write()
return True
# /run ---------------------------------------------------------------- }}}
# /class MacDefaults ------------------------------------------------------ }}}
# main -------------------------------------------------------------------- {{{
def main():
module = AnsibleModule(
argument_spec=dict(
domain=dict(
default="NSGlobalDomain",
required=False,
),
host=dict(
default=None,
required=False,
),
key=dict(
default=None,
),
type=dict(
default="string",
required=False,
choices=[
"array",
"bool",
"boolean",
"date",
"float",
"int",
"integer",
"string",
],
),
array_add=dict(
default=False,
required=False,
type='bool',
),
value=dict(
default=None,
required=False,
),
state=dict(
default="present",
required=False,
choices=[
"absent", "present"
],
),
path=dict(
default="/usr/bin:/usr/local/bin",
required=False,
)
),
supports_check_mode=True,
)
domain = module.params['domain']
host = module.params['host']
key = module.params['key']
type = module.params['type']
array_add = module.params['array_add']
value = module.params['value']
state = module.params['state']
path = module.params['path']
try:
defaults = OSXDefaults(module=module, domain=domain, host=host, key=key, type=type,
array_add=array_add, value=value, state=state, path=path)
changed = defaults.run()
module.exit_json(changed=changed)
except OSXDefaultsException:
e = get_exception()
module.fail_json(msg=e.message)
# /main ------------------------------------------------------------------- }}}
if __name__ == '__main__':
main()
| kaarolch/ansible | lib/ansible/modules/system/osx_defaults.py | Python | gpl-3.0 | 14,430 | 0.003119 |
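# Hedged illustration of the boolean coercion rules that
# OSXDefaults._convert_type implements above. This standalone copy is not
# part of the module; it uses str where the module's Python 2 code checks
# basestring.
def to_bool(value):
    if isinstance(value, str):
        value = value.lower()
    if value in [True, 1, "true", "1", "yes"]:
        return True
    if value in [False, 0, "false", "0", "no"]:
        return False
    raise ValueError("Invalid boolean value: %r" % (value,))

assert to_bool("Yes") is True and to_bool(0) is False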
from __future__ import print_function
import pytest
from website.app import init_app
from tests.json_api_test_app import JSONAPITestApp
@pytest.fixture()
def app():
return JSONAPITestApp()
# NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings
@pytest.fixture(autouse=True, scope='session')
def app_init():
init_app(routes=False, set_backends=False)
| Johnetordoff/osf.io | api_tests/conftest.py | Python | apache-2.0 | 378 | 0.002646 |
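# Hedged sketch of a test consuming the fixture above, assuming
# JSONAPITestApp follows webtest's TestApp interface; the '/v2/' route is
# an assumption about the OSF API and may differ.
def test_api_root(app):
    res = app.get('/v2/')
    assert res.status_code == 200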
# -*- coding: UTF-8 -*-
# COPYRIGHT (c) 2016 Cristóbal Ganter
#
# GNU AFFERO GENERAL PUBLIC LICENSE
# Version 3, 19 November 2007
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from functools import partialmethod
from weakref import finalize
from tornado.gen import coroutine
from src import messages as msg
from src.db import message_broker as mb
from src.pub_sub import MalformedMessageError, \
UnrecognizedOwnerError
_path = 'src.swclass'
class WSClass(object):
"""Attaches its methods to a controller.MSGHandler.
.. todo::
* Explain this class better.
"""
_path = '.'.join((_path, 'WSClass'))
def __init__(self, handler):
_path = msg.join_path(self._path, '__init__')
self.handler = handler
self.pub_subs = {
'w': self.handler.ws_pub_sub,
'd': mb,
'l': self.handler.local_pub_sub,
}
for attr_name in dir(self):
attribute = getattr(self, attr_name)
if hasattr(attribute, 'msg_types'):
for _type, channels in attribute.msg_types:
msg.code_debug(
_path,
'Adding action: %r ...' % attribute
)
self.register_action_in(
msg_type=_type, action=attribute,
channels=channels)
finalize(
self, msg.code_debug, self._path,
'Deleting WSClass {0} from {0.handler} '
'...'.format(self)
)
@property
def channels(self):
return self.pub_subs.keys()
def redirect_to(self, channel, message, content=False):
"""Redirect ``message`` through ``channel``.
If ``content`` is ``True``, then the object
corresponding to the ``'content'`` key of
``message`` is sent.
:param str channel:
The channel through which ``message`` will be
sent.
:param dict message:
The message to be sent.
:param bool content:
If ``True``, just the object corresponding to
the ``'content'`` key of ``message`` will be
sent.
If ``False``, the whole message will be sent.
:raises MalformedMessageError:
If ``content`` is ``True``, but ``message``
doesn't have the ``'content'`` key.
:raises BadChannelArgumentError:
If ``channel`` is not one of ``self.pub_subs``
keys.
:raises NotDictError:
If ``message`` is not a dictionary.
:raises NoMessageTypeError:
If the message or it's content doesn't have the
``'type'`` key.
:raises NoActionForMsgTypeError:
If ``send_function`` of the ``PubSub`` object
wasn't specified during object creation and
there's no registered action for this message
type.
"""
try:
m = message['content'] if content else message
self.pub_subs[channel].send_message(m)
except KeyError as ke:
if 'content' not in message:
mme = MalformedMessageError(
"If content=True, then message must "
"have the 'content' key."
)
raise mme from ke
elif channel not in self.pub_subs:
raise \
BadChannelArgumentError(self.channels) \
from ke
else:
raise
redirect_content_to = partialmethod(redirect_to,
content=True)
def register_action_in(self, msg_type, action,
channels):
"""Register ``action`` in a set of channels.
:param str msg_type:
The message type to which ``action`` will be
subscribed.
:param callable action:
The action to be registered in ``channels``.
:param set channels:
Set of strings, which identify all the channels
to which ``action`` will be registered.
:raises BadChannelArgumentError:
If any channel is not one of ``self.pub_subs``
keys.
"""
try:
for channel in channels:
ps = self.pub_subs[channel]
ps.register(msg_type, action, self)
except KeyError as ke:
if not all(c in self.pub_subs
for c in channels):
raise \
BadChannelArgumentError(self.channels) \
from ke
else:
raise
def unregister(self):
for ps in self.pub_subs.values():
try:
ps.remove_owner(self)
except UnrecognizedOwnerError:
pass
@coroutine
def end(self):
self.unregister()
class subscribe(object):
"""Append the ``msg_types`` attribute to a method.
Each parameter should have one of the following forms:
``type``, ``(type, channel)`` or
``(type, {channel, ...})``. Where ``type`` is a string
containing the message_type to which you want the method
to be subscribed and ``channel`` is one of this strings:
``'w'``, ``'d'``, ``'l'``. The channel strings mean:
Websocket, Database and Local.
If there are only 2 string parameters and the second is
one character long, then these parameters are interpreted
as ``subscribe(type, channel)``.
This class should be used as a decorator.
:raises TypeError:
If any element of ``msg_types`` is not a tuple or a
string.
:raises ValueError:
If any tuple in ``msg_types`` has a length different
than 2.
"""
_path = '.'.join((_path, 'subscribe'))
def __init__(self, *msg_types,
channels={'w', 'd', 'l'}):
if len(msg_types) == 2 and \
isinstance(msg_types[0], str) and \
isinstance(msg_types[1], str) and \
len(msg_types[1]) == 1:
msg_types = ((msg_types[0], msg_types[1]),)
for t in msg_types:
if not isinstance(t, (tuple, str)):
raise TypeError(
'msg_types has an element that is not '
'a tuple or a string.'
)
if isinstance(t, tuple) and len(t) != 2:
raise ValueError(
'msg_types has a tuple that has a '
'length different than 2.'
)
self.msg_types = [(t, channels)
for t in msg_types
if isinstance(t, str)]
self.msg_types.extend(
(t[0], {t[1]})
if isinstance(t[1], str)
else t
for t in msg_types
if isinstance(t, tuple)
)
def __call__(self, method):
_path = '.'.join((self._path, '__call__'))
msg.code_debug(
_path,
'Subscribing method {!r} to {!r} message types '
'...'.format(method, self.msg_types)
)
method.msg_types = self.msg_types
return method
class BadChannelArgumentError(ValueError):
def __init__(self, channels, *args):
super().__init__(
'The channel argument must be one of the '
'following strings: {}.'.format(channels),
*args
)
| TelematicaUSM/EduRT | src/wsclass.py | Python | agpl-3.0 | 8,134 | 0 |
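# Hedged usage sketch based on the docstrings above (not EduRT source): a
# WSClass subclass whose method is subscribed to one message type on the
# websocket channel only, via the two-string subscribe(type, channel) form.
class EchoWS(WSClass):

    @subscribe('echo.request', 'w')
    def echo(self, message):
        # Reply over the websocket channel with the same content.
        self.pub_subs['w'].send_message({
            'type': 'echo.reply',
            'content': message.get('content'),
        })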
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from . import models
from . import wizards
from . import controllers
from . import forms
| CompassionCH/compassion-switzerland | partner_communication_switzerland/__init__.py | Python | agpl-3.0 | 468 | 0 |
class Foo(object):
def set(self, value):
self.field = value
def get(self):
return self.field
a = Foo()
a.set("hello world")
z = a.get()
print z
z
a
| retoo/pystructure | s101g/examples/simple/simple.py | Python | lgpl-2.1 | 195 | 0.025641 |
#
# Copyright (C) 2012-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
"""
This module includes functions and classes for dealing with multiple layouts in
Anaconda. It wraps the libxklavier functionality to protect Anaconda from
dealing with its "nice" API that looks like a Lisp-influenced "good old C" and
also systemd-localed functionality.
It provides a XklWrapper class with several methods that can be used for listing
and various modifications of keyboard layouts settings.
"""
import gi
gi.require_version("GdkX11", "3.0")
gi.require_version("Xkl", "1.0")
from gi.repository import GdkX11, Xkl
import threading
import gettext
from collections import namedtuple
from pyanaconda.core import util
from pyanaconda.core.configuration.anaconda import conf
from pyanaconda.core.constants import DEFAULT_KEYBOARD
from pyanaconda.keyboard import join_layout_variant, parse_layout_variant, KeyboardConfigError, InvalidLayoutVariantSpec
from pyanaconda.core.async_utils import async_action_wait
from pyanaconda.anaconda_loggers import get_module_logger
log = get_module_logger(__name__)
Xkb_ = lambda x: gettext.translation("xkeyboard-config", fallback=True).gettext(x)
iso_ = lambda x: gettext.translation("iso_639", fallback=True).gettext(x)
# namedtuple for information about a keyboard layout (its language and description)
LayoutInfo = namedtuple("LayoutInfo", ["lang", "desc"])
class XklWrapperError(KeyboardConfigError):
"""Exception class for reporting libxklavier-related problems"""
pass
class XklWrapper(object):
"""
Class wrapping the libxklavier functionality
Use this class as a singleton class because it provides read-only data
and initialization (that takes quite a lot of time) reads always the
same data. It doesn't have sense to make multiple instances
"""
_instance = None
_instance_lock = threading.Lock()
@staticmethod
def get_instance():
with XklWrapper._instance_lock:
if not XklWrapper._instance:
XklWrapper._instance = XklWrapper()
return XklWrapper._instance
def __init__(self):
#initialize Xkl-related stuff
display = GdkX11.x11_get_default_xdisplay()
self._engine = Xkl.Engine.get_instance(display)
self._rec = Xkl.ConfigRec()
if not self._rec.get_from_server(self._engine):
raise XklWrapperError("Failed to get configuration from server")
#X is probably initialized to the 'us' layout without any variant and
#since we want to add layouts with variants we need the layouts and
#variants lists to have the same length. Add "" padding to variants.
#See docstring of the add_layout method for details.
diff = len(self._rec.layouts) - len(self._rec.variants)
if diff > 0 and conf.system.can_activate_layouts:
self._rec.set_variants(self._rec.variants + (diff * [""]))
if not self._rec.activate(self._engine):
# failed to activate layouts given e.g. by a kickstart (may be
# invalid)
lay_var_str = ",".join(map(join_layout_variant,
self._rec.layouts,
self._rec.variants))
log.error("Failed to activate layouts: '%s', "
"falling back to default %s", lay_var_str, DEFAULT_KEYBOARD)
self._rec.set_layouts([DEFAULT_KEYBOARD])
self._rec.set_variants([""])
if not self._rec.activate(self._engine):
# failed to activate even the default layout, something is
# really wrong
raise XklWrapperError("Failed to initialize layouts")
#needed also for Gkbd.KeyboardDrawingDialog
self.configreg = Xkl.ConfigRegistry.get_instance(self._engine)
self.configreg.load(False)
self._layout_infos = dict()
self._layout_infos_lock = threading.RLock()
self._switch_opt_infos = dict()
self._switch_opt_infos_lock = threading.RLock()
#this might take quite a long time
self.configreg.foreach_language(self._get_language_variants, None)
self.configreg.foreach_country(self._get_country_variants, None)
#'grp' means that we want layout (group) switching options
self.configreg.foreach_option('grp', self._get_switch_option, None)
def _get_lang_variant(self, c_reg, item, subitem, lang):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
#if this layout has already been added for some other language,
#do not add it again (would result in duplicates in our lists)
if name not in self._layout_infos:
with self._layout_infos_lock:
self._layout_infos[name] = LayoutInfo(lang, description)
def _get_country_variant(self, c_reg, item, subitem, country):
if subitem:
name = item.get_name() + " (" + subitem.get_name() + ")"
description = subitem.get_description()
else:
name = item.get_name()
description = item.get_description()
# if the layout was not added with any language, add it with a country
if name not in self._layout_infos:
with self._layout_infos_lock:
self._layout_infos[name] = LayoutInfo(country, description)
def _get_language_variants(self, c_reg, item, user_data=None):
lang_name, lang_desc = item.get_name(), item.get_description()
c_reg.foreach_language_variant(lang_name, self._get_lang_variant, lang_desc)
def _get_country_variants(self, c_reg, item, user_data=None):
country_name, country_desc = item.get_name(), item.get_description()
c_reg.foreach_country_variant(country_name, self._get_country_variant,
country_desc)
def _get_switch_option(self, c_reg, item, user_data=None):
"""Helper function storing layout switching options in foreach cycle"""
desc = item.get_description()
name = item.get_name()
with self._switch_opt_infos_lock:
self._switch_opt_infos[name] = desc
def get_current_layout(self):
"""
Get current activated X layout and variant
:return: current activated X layout and variant (e.g. "cz (qwerty)")
"""
# ported from the widgets/src/LayoutIndicator.c code
self._engine.start_listen(Xkl.EngineListenModes.TRACK_KEYBOARD_STATE)
state = self._engine.get_current_state()
cur_group = state.group
num_groups = self._engine.get_num_groups()
# BUG?: if the last layout in the list is activated and removed,
# state.group may be equal to n_groups
if cur_group >= num_groups:
cur_group = num_groups - 1
layout = self._rec.layouts[cur_group] # pylint: disable=unsubscriptable-object
try:
variant = self._rec.variants[cur_group] # pylint: disable=unsubscriptable-object
except IndexError:
# X server may have forgotten to add the "" variant for its default layout
variant = ""
self._engine.stop_listen(Xkl.EngineListenModes.TRACK_KEYBOARD_STATE)
return join_layout_variant(layout, variant)
def get_available_layouts(self):
"""A list of layouts"""
with self._layout_infos_lock:
return list(self._layout_infos.keys())
def get_switching_options(self):
"""Method returning list of available layout switching options"""
with self._switch_opt_infos_lock:
return list(self._switch_opt_infos.keys())
def get_layout_variant_description(self, layout_variant, with_lang=True, xlated=True):
"""
Get description of the given layout-variant.
:param layout_variant: layout-variant specification (e.g. 'cz (qwerty)')
:type layout_variant: str
:param with_lang: whether to include language of the layout-variant (if defined)
in the description or not
:type with_lang: bool
:param xlated: whether to return the translated or the English version of the description
:type xlated: bool
:return: description of the layout-variant specification (e.g. 'Czech (qwerty)')
:rtype: str
"""
layout_info = self._layout_infos[layout_variant]
# translate language and upcase its first letter, translate the
# layout-variant description
if xlated:
lang = util.upcase_first_letter(iso_(layout_info.lang))
description = Xkb_(layout_info.desc)
else:
lang = util.upcase_first_letter(layout_info.lang)
description = layout_info.desc
if with_lang and lang and not description.startswith(lang):
return "%s (%s)" % (lang, description)
else:
return description
def get_switch_opt_description(self, switch_opt):
"""
Get description of the given layout switching option.
:param switch_opt: switching option name/ID (e.g. 'grp:alt_shift_toggle')
:type switch_opt: str
:return: description of the layout switching option (e.g. 'Alt + Shift')
:rtype: str
"""
# translate the description of the switching option
return Xkb_(self._switch_opt_infos[switch_opt])
@async_action_wait
def activate_default_layout(self):
"""
Activates default layout (the first one in the list of configured
layouts).
"""
self._engine.lock_group(0)
def is_valid_layout(self, layout):
"""Return if given layout is valid layout or not"""
return layout in self._layout_infos
@async_action_wait
def add_layout(self, layout):
"""
Method that tries to add a given layout to the current X configuration.
The X layouts configuration is handled by two lists. A list of layouts
and a list of variants. Index-matching items in these lists (as if they
were zipped) are used for the construction of real layouts (e.g.
'cz (qwerty)').
:param layout: either 'layout' or 'layout (variant)'
:raise XklWrapperError: if the given layout is invalid or cannot be added
"""
try:
#we can get 'layout' or 'layout (variant)'
(layout, variant) = parse_layout_variant(layout)
except InvalidLayoutVariantSpec as ilverr:
raise XklWrapperError("Failed to add layout: %s" % ilverr)
#do not add the same layout-variant combination multiple times
if (layout, variant) in list(zip(self._rec.layouts, self._rec.variants)):
return
self._rec.set_layouts(self._rec.layouts + [layout])
self._rec.set_variants(self._rec.variants + [variant])
if not self._rec.activate(self._engine):
raise XklWrapperError("Failed to add layout '%s (%s)'" % (layout,
variant))
@async_action_wait
def remove_layout(self, layout):
"""
Method that tries to remove a given layout from the current X
configuration.
See also the documentation for the add_layout method.
:param layout: either 'layout' or 'layout (variant)'
:raise XklWrapperError: if the given layout cannot be removed
"""
#we can get 'layout' or 'layout (variant)'
(layout, variant) = parse_layout_variant(layout)
layouts_variants = list(zip(self._rec.layouts, self._rec.variants))
if (layout, variant) not in layouts_variants:
msg = "'%s (%s)' not in the list of added layouts" % (layout,
variant)
raise XklWrapperError(msg)
idx = layouts_variants.index((layout, variant))
new_layouts = self._rec.layouts[:idx] + self._rec.layouts[(idx + 1):] # pylint: disable=unsubscriptable-object
new_variants = self._rec.variants[:idx] + self._rec.variants[(idx + 1):] # pylint: disable=unsubscriptable-object
self._rec.set_layouts(new_layouts)
self._rec.set_variants(new_variants)
if not self._rec.activate(self._engine):
raise XklWrapperError("Failed to remove layout '%s (%s)'" % (layout,
variant))
@async_action_wait
def replace_layouts(self, layouts_list):
"""
Method that replaces the layouts defined in the current X configuration
with the new ones given.
:param layouts_list: list of layouts defined as either 'layout' or
'layout (variant)'
:raise XklWrapperError: if layouts cannot be replaced with the new ones
"""
new_layouts = list()
new_variants = list()
for layout_variant in layouts_list:
(layout, variant) = parse_layout_variant(layout_variant)
new_layouts.append(layout)
new_variants.append(variant)
self._rec.set_layouts(new_layouts)
self._rec.set_variants(new_variants)
if not self._rec.activate(self._engine):
msg = "Failed to replace layouts with: %s" % ",".join(layouts_list)
raise XklWrapperError(msg)
@async_action_wait
def set_switching_options(self, options):
"""
Method that sets options for layout switching. It replaces the old
options with the new ones.
:param options: layout switching options to be set
:type options: list or generator
:raise XklWrapperError: if the old options cannot be replaced with the
new ones
"""
#preserve old "non-switching options"
new_options = [opt for opt in self._rec.options if "grp:" not in opt] # pylint: disable=not-an-iterable
new_options += options
self._rec.set_options(new_options)
if not self._rec.activate(self._engine):
msg = "Failed to set switching options to: %s" % ",".join(options)
raise XklWrapperError(msg)
| atodorov/anaconda | pyanaconda/ui/gui/xkl_wrapper.py | Python | gpl-2.0 | 15,372 | 0.003578 |
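# Hedged usage sketch: exercising the wrapper from a process that has an X
# display available (it will not work headless). The layout names and
# switching option are examples only.
wrapper = XklWrapper.get_instance()
print(wrapper.get_current_layout())            # e.g. 'us' or 'cz (qwerty)'
if wrapper.is_valid_layout('cz (qwerty)'):
    wrapper.add_layout('cz (qwerty)')
    wrapper.set_switching_options(['grp:alt_shift_toggle'])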